diff --git a/.ci/init.gradle b/.ci/init.gradle index e59541fe6edd..28a04308d184 100644 --- a/.ci/init.gradle +++ b/.ci/init.gradle @@ -10,6 +10,8 @@ initscript { } } +boolean USE_ARTIFACTORY=false + ['VAULT_ADDR', 'VAULT_ROLE_ID', 'VAULT_SECRET_ID'].each { if (System.env."$it" == null) { throw new GradleException("$it must be set!") @@ -37,39 +39,44 @@ final Vault vault = new Vault( ) .withRetries(5, 1000) -final Map artifactoryCredentials = vault.logical() - .read("secret/elasticsearch-ci/artifactory.elstc.co") - .getData(); -logger.info("Using elastic artifactory repos") -Closure configCache = { - return { - name "artifactory-gradle-release" - url "https://artifactory.elstc.co/artifactory/gradle-release" - credentials { - username artifactoryCredentials.get("username") - password artifactoryCredentials.get("token") +if (USE_ARTIFACTORY) { + final Map artifactoryCredentials = vault.logical() + .read("secret/elasticsearch-ci/artifactory.elstc.co") + .getData(); + logger.info("Using elastic artifactory repos") + Closure configCache = { + return { + name "artifactory-gradle-release" + url "https://artifactory.elstc.co/artifactory/gradle-release" + credentials { + username artifactoryCredentials.get("username") + password artifactoryCredentials.get("token") + } } } -} -settingsEvaluated { settings -> - settings.pluginManagement { - repositories { - maven configCache() - } - } -} -projectsLoaded { - allprojects { - buildscript { + settingsEvaluated { settings -> + settings.pluginManagement { repositories { maven configCache() } } - repositories { - maven configCache() + } + projectsLoaded { + allprojects { + buildscript { + repositories { + maven configCache() + } + } + repositories { + maven configCache() + } } } +} + +projectsLoaded { rootProject { project.pluginManager.withPlugin('com.gradle.build-scan') { buildScan.server = 'https://gradle-enterprise.elastic.co' @@ -77,6 +84,7 @@ projectsLoaded { } } + final String buildCacheUrl = System.getProperty('org.elasticsearch.build.cache.url') final boolean buildCachePush = Boolean.valueOf(System.getProperty('org.elasticsearch.build.cache.push', 'false')) diff --git a/.eclipseformat.xml b/.eclipseformat.xml new file mode 100644 index 000000000000..4df52d1b6036 --- /dev/null +++ b/.eclipseformat.xml @@ -0,0 +1,362 @@ +<!-- 362 lines of Eclipse JDT formatter profile settings (XML) omitted --> diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c2bae4e20625..f96ef5dd1d5e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -155,19 +155,68 @@ For Eclipse, go to `Preferences->Java->Installed JREs` and add `-ea` to ### Java Language Formatting Guidelines +Java files in the Elasticsearch codebase are formatted with the Eclipse JDT +formatter, using the [Spotless +Gradle](https://github.com/diffplug/spotless/tree/master/plugin-gradle) +plugin.
This plugin is configured on a project-by-project basis, via +`build.gradle` in the root of the repository. So long as at least one +project is configured, the formatting check can be run explicitly with: + + ./gradlew spotlessJavaCheck + +The code can be formatted with: + + ./gradlew spotlessApply + +These tasks can also be run for specific subprojects, e.g. + + ./gradlew server:spotlessJavaCheck + Please follow these formatting guidelines: * Java indent is 4 spaces * Line width is 140 characters -* Lines of code surrounded by `// tag` and `// end` comments are included in the -documentation and should only be 76 characters wide not counting -leading indentation -* The rest is left to Java coding standards -* Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. -* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. This can be done automatically by your IDE: - * Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value. - * IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`. There are two configuration options: `Class count to use import with '*'` and `Names count to use static import with '*'`. Set their values to 99999 or some other absurdly high value. -* Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so. +* Lines of code surrounded by `// tag` and `// end` comments are included + in the documentation and should only be 76 characters wide not counting + leading indentation +* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause + the build to fail. This can be done automatically by your IDE: + * Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are + two boxes labeled "`Number of (static )? imports needed for .*`". Set + their values to 99999 or some other absurdly high value. + * IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`. + There are two configuration options: `Class count to use import with + '*'` and `Names count to use static import with '*'`. Set their values + to 99999 or some other absurdly high value. + +#### Editor / IDE Support + +Eclipse IDEs can import the file [.eclipseformat.xml](.eclipseformat.xml) +directly. + +IntelliJ IDEs can +[import](https://blog.jetbrains.com/idea/2014/01/intellij-idea-13-importing-code-formatter-settings-from-eclipse/) +the same settings file, and/or use the [Eclipse Code +Formatter](https://plugins.jetbrains.com/plugin/6546-eclipse-code-formatter) +plugin. + +You can also tell Spotless to [format a specific +file](https://github.com/diffplug/spotless/tree/master/plugin-gradle#can-i-apply-spotless-to-specific-files) +from the command line. + +#### Formatting failures + +Sometimes Spotless will report a "misbehaving rule which can't make up its +mind" and will recommend enabling the `paddedCell()` setting. If you +enable this setting and run the format check again, +Spotless will write files to +`$PROJECT/build/spotless-diagnose-java/` to aid diagnosis. It writes +different copies of the formatted files, so that you can see how they +differ and infer what the problem is.
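+ +For example, suppose a hypothetical `Foo.java` is the file that trips the +check. Diffing two of the copies that Spotless writes out shows exactly +where the formatter disagrees with itself (the layout under +`spotless-diagnose-java` varies by Spotless version, so these paths are +illustrative only): + + diff build/spotless-diagnose-java/variantA/Foo.java \ + build/spotless-diagnose-java/variantB/Foo.java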
The `paddedCell()` option is disabled for normal operation in order to +detect any misbehaviour. You can enable the option from the command line +by running Gradle with `-Dspotless.paddedcell`. ### License Headers diff --git a/build.gradle b/build.gradle index a826ecb4475e..34d43545bce2 100644 --- a/build.gradle +++ b/build.gradle @@ -35,6 +35,7 @@ plugins { id 'com.gradle.build-scan' version '2.4' id 'lifecycle-base' id 'elasticsearch.global-build-info' + id "com.diffplug.gradle.spotless" version "3.24.2" apply false } apply plugin: 'nebula.info-scm' @@ -98,6 +99,34 @@ subprojects { plugins.withType(BuildPlugin).whenPluginAdded { project.licenseFile = project.rootProject.file('licenses/APACHE-LICENSE-2.0.txt') project.noticeFile = project.rootProject.file('NOTICE.txt') + + // Projects that should be formatted and checked with Spotless are + // listed here, by project path. Once the number of formatted projects + // is greater than the number of unformatted projects, this can be + // switched to an exclude list, and eventually removed completely. + def projectPathsToFormat = [ + // ':build-tools' + ] + + if (projectPathsToFormat.contains(project.path)) { + project.apply plugin: "com.diffplug.gradle.spotless" + + spotless { + java { + + removeUnusedImports() + eclipse().configFile rootProject.file('.eclipseformat.xml') + trimTrailingWhitespace() + + // See CONTRIBUTING.md for details of when to enable this. + if (System.getProperty('spotless.paddedcell') != null) { + paddedCell() + } + } + } + + precommit.dependsOn 'spotlessJavaCheck' + } } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index e9c21ce68341..293d2ccd3571 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -112,7 +112,7 @@ dependencies { compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' compile 'com.netflix.nebula:gradle-info-plugin:3.0.3' - compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r' + compile 'org.eclipse.jgit:org.eclipse.jgit:5.5.0.201909110433-r' compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
compile 'org.apache.rat:apache-rat:0.11' compile "org.elasticsearch:jna:4.5.1" diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index 54db2cd71628..b42bc83b4725 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -206,8 +206,11 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { response(snippet) return } - if (snippet.test || snippet.console || - snippet.language == 'console') { + if ((snippet.language == 'js') && (snippet.console)) { + throw new InvalidUserDataException( + "$snippet: Use `[source,console]` instead of `// CONSOLE`.") + } + if (snippet.test || snippet.language == 'console') { test(snippet) previousTest = snippet return diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 45752e8ace0a..9e9f93b22ddd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -50,6 +50,7 @@ class PluginBuildPlugin implements Plugin { @Override void apply(Project project) { project.pluginManager.apply(BuildPlugin) + project.pluginManager.apply(TestClustersPlugin) PluginPropertiesExtension extension = project.extensions.create(PLUGIN_EXTENSION_NAME, PluginPropertiesExtension, project) configureDependencies(project) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index e3df285503c2..365f6b82e664 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -26,35 +26,18 @@ import org.elasticsearch.gradle.tool.Boilerplate import org.elasticsearch.gradle.tool.ClasspathUtils import org.gradle.api.DefaultTask import org.gradle.api.Task -import org.gradle.api.execution.TaskExecutionAdapter import org.gradle.api.file.FileCopyDetails -import org.gradle.api.logging.Logger -import org.gradle.api.logging.Logging import org.gradle.api.tasks.Copy import org.gradle.api.tasks.Input -import org.gradle.api.tasks.TaskState -import org.gradle.api.tasks.options.Option import org.gradle.api.tasks.testing.Test import org.gradle.plugins.ide.idea.IdeaPlugin - -import java.nio.charset.StandardCharsets -import java.nio.file.Files -import java.util.stream.Stream - /** * A wrapper task around setting up a cluster and running rest tests. */ class RestIntegTestTask extends DefaultTask { - private static final Logger LOGGER = Logging.getLogger(RestIntegTestTask) - - protected ClusterConfiguration clusterConfig - protected Test runner - /** Info about nodes in the integ test cluster. Note this is *not* available until runtime. */ - List nodes - /** Flag indicating whether the rest tests in the rest spec should be run. 
*/ @Input Boolean includePackaged = false @@ -62,18 +45,13 @@ class RestIntegTestTask extends DefaultTask { RestIntegTestTask() { runner = project.tasks.create("${name}Runner", RestTestRunnerTask.class) super.dependsOn(runner) - boolean usesTestclusters = project.plugins.hasPlugin(TestClustersPlugin.class) - if (usesTestclusters == false) { - clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project) - runner.outputs.doNotCacheIf("Caching is disabled when using ClusterFormationTasks", { true }) - } else { - project.testClusters { + + project.testClusters { "$name" { javaHome = project.file(project.ext.runtimeJavaHome) } - } - runner.useCluster project.testClusters."$name" } + runner.useCluster project.testClusters."$name" runner.include('**/*IT.class') runner.systemProperty('tests.rest.load_packaged', 'false') @@ -82,40 +60,11 @@ class RestIntegTestTask extends DefaultTask { if (System.getProperty("tests.cluster") != null || System.getProperty("tests.clustername") != null) { throw new IllegalArgumentException("tests.rest.cluster, tests.cluster, and tests.clustername must all be null or non-null") } - if (usesTestclusters == true) { - ElasticsearchCluster cluster = project.testClusters."${name}" - runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> cluster.allHttpSocketURI.join(",")}") - runner.nonInputProperties.systemProperty('tests.cluster', "${-> cluster.transportPortURI}") - runner.nonInputProperties.systemProperty('tests.clustername', "${-> cluster.getName()}") - } else { - // we pass all nodes to the rest cluster to allow the clients to round-robin between them - // this is more realistic than just talking to a single node - runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> nodes.collect { it.httpUri() }.join(",")}") - runner.nonInputProperties.systemProperty('tests.config.dir', "${-> nodes[0].pathConf}") - // TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin - // that sets up the test cluster and passes this transport uri instead of http uri. 
Until then, we pass - // both as separate sysprops - runner.nonInputProperties.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}") - runner.nonInputProperties.systemProperty('tests.clustername', "${-> nodes[0].clusterName}") - // dump errors and warnings from cluster log on failure - TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() { - @Override - void afterExecute(Task task, TaskState state) { - if (task == runner && state.failure != null) { - for (NodeInfo nodeInfo : nodes) { - printLogExcerpt(nodeInfo) - } - } - } - } - runner.doFirst { - project.gradle.addListener(logDumpListener) - } - runner.doLast { - project.gradle.removeListener(logDumpListener) - } - } + ElasticsearchCluster cluster = project.testClusters."${name}" + runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> cluster.allHttpSocketURI.join(",")}") + runner.nonInputProperties.systemProperty('tests.cluster', "${-> cluster.transportPortURI}") + runner.nonInputProperties.systemProperty('tests.clustername', "${-> cluster.getName()}") } else { if (System.getProperty("tests.cluster") == null || System.getProperty("tests.clustername") == null) { throw new IllegalArgumentException("tests.rest.cluster, tests.cluster, and tests.clustername must all be null or non-null") @@ -137,13 +86,6 @@ class RestIntegTestTask extends DefaultTask { runner.enabled = false return // no need to add cluster formation tasks if the task won't run! } - if (usesTestclusters == false) { - // only create the cluster if needed as otherwise an external cluster to use was specified - if (System.getProperty("tests.rest.cluster") == null) { - nodes = ClusterFormationTasks.setup(project, "${name}Cluster", runner, clusterConfig) - } - super.dependsOn(runner.finalizedBy) - } } } @@ -152,17 +94,6 @@ class RestIntegTestTask extends DefaultTask { includePackaged = include } - @Option( - option = "debug-jvm", - description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch." - ) - public void setDebug(boolean enabled) { - clusterConfig.debug = enabled; - } - - public List getNodes() { - return nodes - } @Override public Task dependsOn(Object... dependencies) { @@ -189,44 +120,6 @@ class RestIntegTestTask extends DefaultTask { project.tasks.getByName("${name}Runner").configure(configure) } - /** Print out an excerpt of the log from the given node. */ - protected static void printLogExcerpt(NodeInfo nodeInfo) { - File logFile = new File(nodeInfo.homeDir, "logs/${nodeInfo.clusterName}.log") - LOGGER.lifecycle("\nCluster ${nodeInfo.clusterName} - node ${nodeInfo.nodeNum} log excerpt:") - LOGGER.lifecycle("(full log at ${logFile})") - LOGGER.lifecycle('-----------------------------------------') - Stream stream = Files.lines(logFile.toPath(), StandardCharsets.UTF_8) - try { - boolean inStartup = true - boolean inExcerpt = false - int linesSkipped = 0 - for (String line : stream) { - if (line.startsWith("[")) { - inExcerpt = false // clear with the next log message - } - if (line =~ /(\[WARN *\])|(\[ERROR *\])/) { - inExcerpt = true // show warnings and errors - } - if (inStartup || inExcerpt) { - if (linesSkipped != 0) { - LOGGER.lifecycle("... 
SKIPPED ${linesSkipped} LINES ...") - } - LOGGER.lifecycle(line) - linesSkipped = 0 - } else { - ++linesSkipped - } - if (line =~ /recovered \[\d+\] indices into cluster_state/) { - inStartup = false - } - } - } finally { - stream.close() - } - LOGGER.lifecycle('=========================================') - - } - Copy createCopyRestSpecTask() { Boilerplate.maybeCreate(project.configurations, 'restSpec') { project.dependencies.add( diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy index 071b7b3db783..3e56046e49f4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy @@ -19,6 +19,7 @@ package org.elasticsearch.gradle.test import org.elasticsearch.gradle.BuildPlugin +import org.elasticsearch.gradle.testclusters.TestClustersPlugin import org.gradle.api.InvalidUserDataException import org.gradle.api.Plugin import org.gradle.api.Project @@ -43,6 +44,7 @@ public class RestTestPlugin implements Plugin { + 'elasticsearch.standalone-rest-test') } + project.pluginManager.apply(TestClustersPlugin) RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class) integTest.description = 'Runs rest tests against an elasticsearch cluster.' integTest.group = JavaBasePlugin.VERIFICATION_GROUP diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy index cfe610fa568c..539b6fa7632d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy @@ -20,13 +20,12 @@ package org.elasticsearch.gradle.test - import groovy.transform.CompileStatic import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask -import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin import org.elasticsearch.gradle.precommit.PrecommitTasks +import org.elasticsearch.gradle.testclusters.TestClustersPlugin import org.gradle.api.InvalidUserDataException import org.gradle.api.JavaVersion import org.gradle.api.Plugin @@ -42,7 +41,6 @@ import org.gradle.api.tasks.compile.JavaCompile import org.gradle.api.tasks.testing.Test import org.gradle.plugins.ide.eclipse.model.EclipseModel import org.gradle.plugins.ide.idea.model.IdeaModel - /** * Configures the build to compile tests against Elasticsearch's test framework * and run REST tests. 
Use BuildPlugin if you want to build main code as well @@ -60,6 +58,7 @@ class StandaloneRestTestPlugin implements Plugin { } project.rootProject.pluginManager.apply(GlobalBuildInfoPlugin) project.pluginManager.apply(JavaBasePlugin) + project.pluginManager.apply(TestClustersPlugin) project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask) BuildPlugin.configureRepositories(project) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/TestWithDependenciesPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/TestWithDependenciesPlugin.groovy index 7e370fd69e2d..35c143c1421a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/TestWithDependenciesPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/TestWithDependenciesPlugin.groovy @@ -56,7 +56,8 @@ class TestWithDependenciesPlugin implements Plugin { private static addPluginResources(Project project, Project pluginProject) { String outputDir = "${project.buildDir}/generated-resources/${pluginProject.name}" - String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata") + String camelName = pluginProject.name.replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) } + String taskName = "copy" + camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1) + "Metadata" Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class) copyPluginMetadata.into(outputDir) copyPluginMetadata.from(pluginProject.tasks.pluginProperties) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java index 95ad323ceda8..2f5028315a42 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/TestingConventionsTasks.java @@ -70,19 +70,13 @@ public class TestingConventionsTasks extends DefaultTask { } @Input - public Map> classFilesPerEnabledTask(FileTree testClassFiles) { - Map> collector = new HashMap<>(); - - // Gradle Test - collector.putAll( - getProject().getTasks().withType(Test.class).stream() - .filter(Task::getEnabled) - .collect(Collectors.toMap( - Task::getPath, - task -> task.getCandidateClassFiles().getFiles() - )) - ); - return Collections.unmodifiableMap(collector); + public Map> getClassFilesPerEnabledTask() { + return getProject().getTasks().withType(Test.class).stream() + .filter(Task::getEnabled) + .collect(Collectors.toMap( + Task::getPath, + task -> task.getCandidateClassFiles().getFiles() + )); } @Input @@ -154,7 +148,7 @@ public class TestingConventionsTasks extends DefaultTask { .collect(Collectors.toList()) ).getAsFileTree(); - final Map> classFilesPerTask = classFilesPerEnabledTask(allTestClassFiles); + final Map> classFilesPerTask = getClassFilesPerEnabledTask(); final Map>> testClassesPerTask = classFilesPerTask.entrySet().stream() .collect( diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 6a85f9e1c4bf..587d5484fd80 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -968,12 +968,6 @@ public class ElasticsearchNode implements TestClusterConfiguration { // Don't wait for state, just start up quickly. 
This will also allow new and old nodes in the BWC case to become the master defaultConfig.put("discovery.initial_state_timeout", "0s"); - // TODO: Remove these once https://github.com/elastic/elasticsearch/issues/46091 is fixed - defaultConfig.put("logger.org.elasticsearch.action.support.master.TransportMasterNodeAction", "TRACE"); - defaultConfig.put("logger.org.elasticsearch.cluster.metadata.MetaDataCreateIndexService", "TRACE"); - defaultConfig.put("logger.org.elasticsearch.cluster.service", "DEBUG"); - defaultConfig.put("logger.org.elasticsearch.cluster.coordination", "DEBUG"); - defaultConfig.put("logger.org.elasticsearch.gateway.MetaStateService", "TRACE"); if (getVersion().getMajor() >= 8) { defaultConfig.put("cluster.service.slow_task_logging_threshold", "5s"); defaultConfig.put("cluster.service.slow_master_task_logging_threshold", "5s"); diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixtureExtension.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixtureExtension.java index b4ddcf0bed18..1521b7971333 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixtureExtension.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixtureExtension.java @@ -18,20 +18,65 @@ */ package org.elasticsearch.gradle.testfixtures; +import org.gradle.api.GradleException; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Project; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + public class TestFixtureExtension { private final Project project; final NamedDomainObjectContainer fixtures; + final Map serviceToProjectUseMap = new HashMap<>(); public TestFixtureExtension(Project project) { this.project = project; this.fixtures = project.container(Project.class); } + public void useFixture() { + useFixture(this.project.getPath()); + } + public void useFixture(String path) { + addFixtureProject(path); + serviceToProjectUseMap.put(path, this.project.getPath()); + } + + public void useFixture(String path, String serviceName) { + addFixtureProject(path); + String key = getServiceNameKey(path, serviceName); + serviceToProjectUseMap.put(key, this.project.getPath()); + + Optional otherProject = this.findOtherProjectUsingService(key); + if (otherProject.isPresent()) { + throw new GradleException( + "Projects " + otherProject.get() + " and " + this.project.getPath() + " both claim the " + serviceName + + " service defined in the docker-compose.yml of " + path + ". This is not supported because it breaks " + + "running in parallel. Configure dedicated services for each project and use those instead."
+ ); + } + } + + private String getServiceNameKey(String fixtureProjectPath, String serviceName) { + return fixtureProjectPath + "::" + serviceName; + } + + private Optional findOtherProjectUsingService(String serviceName) { + return this.project.getRootProject().getAllprojects().stream() + .filter(p -> p.equals(this.project) == false) + .filter(p -> p.getExtensions().findByType(TestFixtureExtension.class) != null) + .map(project -> project.getExtensions().getByType(TestFixtureExtension.class)) + .flatMap(ext -> ext.serviceToProjectUseMap.entrySet().stream()) + .filter(entry -> entry.getKey().equals(serviceName)) + .map(Map.Entry::getValue) + .findAny(); + } + + private void addFixtureProject(String path) { Project fixtureProject = this.project.findProject(path); if (fixtureProject == null) { throw new IllegalArgumentException("Could not find test fixture " + fixtureProject); @@ -42,6 +87,20 @@ public class TestFixtureExtension { ); } fixtures.add(fixtureProject); + // Check for exclusive access + Optional otherProject = this.findOtherProjectUsingService(path); + if (otherProject.isPresent()) { + throw new GradleException("Projects " + otherProject.get() + " and " + this.project.getPath() + " both " + + "claim all services from " + path + ". This is not supported because it breaks running in parallel. " + + "Configure specific services in docker-compose.yml for each and add the service name to `useFixture`" + ); + } } + boolean isServiceRequired(String serviceName, String fixtureProject) { + if (serviceToProjectUseMap.containsKey(fixtureProject)) { + return true; + } + return serviceToProjectUseMap.containsKey(getServiceNameKey(fixtureProject, serviceName)); + } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index 556e938875e2..93c91cbee51d 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -20,6 +20,7 @@ package org.elasticsearch.gradle.testfixtures; import com.avast.gradle.dockercompose.ComposeExtension; import com.avast.gradle.dockercompose.DockerComposePlugin; +import com.avast.gradle.dockercompose.ServiceInfo; import com.avast.gradle.dockercompose.tasks.ComposeUp; import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider; @@ -58,9 +59,6 @@ public class TestFixturesPlugin implements Plugin { ext.set("testFixturesDir", testfixturesDir); if (project.file(DOCKER_COMPOSE_YML).exists()) { - // the project that defined a test fixture can also use it - extension.fixtures.add(project); - Task buildFixture = project.getTasks().create("buildFixture"); Task pullFixture = project.getTasks().create("pullFixture"); Task preProcessFixture = project.getTasks().create("preProcessFixture"); @@ -106,6 +104,7 @@ public class TestFixturesPlugin implements Plugin { configureServiceInfoForTask( postProcessFixture, project, + false, (name, port) -> postProcessFixture.getExtensions() .getByType(ExtraPropertiesExtension.class).set(name, port) ); @@ -144,6 +143,7 @@ public class TestFixturesPlugin implements Plugin { configureServiceInfoForTask( task, fixtureProject, + true, (name, host) -> task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host) ); @@ -165,14 +165,23 @@ public class TestFixturesPlugin implements Plugin { ); } - private 
void configureServiceInfoForTask(Task task, Project fixtureProject, BiConsumer consumer) { + private void configureServiceInfoForTask( + Task task, Project fixtureProject, boolean enableFilter, BiConsumer consumer + ) { // Configure ports for the tests as system properties. // We only know these at execution time so we need to do it in doFirst + TestFixtureExtension extension = task.getProject().getExtensions().getByType(TestFixtureExtension.class); task.doFirst(new Action() { @Override public void execute(Task theTask) { fixtureProject.getExtensions().getByType(ComposeExtension.class).getServicesInfos() - .forEach((service, infos) -> { + .entrySet().stream() + .filter(entry -> enableFilter == false || + extension.isServiceRequired(entry.getKey(), fixtureProject.getPath()) + ) + .forEach(entry -> { + String service = entry.getKey(); + ServiceInfo infos = entry.getValue(); infos.getTcpPorts() .forEach((container, host) -> { String name = "test.fixtures." + service + ".tcp." + container; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java index 69a70bd97f17..48009810cf3b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java @@ -29,7 +29,7 @@ final class GraphRequestConverters { private GraphRequestConverters() {} static Request explore(GraphExploreRequest exploreRequest) throws IOException { - String endpoint = RequestConverters.endpoint(exploreRequest.indices(), exploreRequest.types(), "_graph/explore"); + String endpoint = RequestConverters.endpoint(exploreRequest.indices(), "_graph/explore"); Request request = new Request(HttpGet.METHOD_NAME, endpoint); request.setEntity(RequestConverters.createEntity(exploreRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndexLifecycleRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndexLifecycleRequestConverters.java index 563f178711e4..e6105a54bb26 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndexLifecycleRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndexLifecycleRequestConverters.java @@ -204,7 +204,7 @@ final class IndexLifecycleRequestConverters { } static Request executeSnapshotLifecyclePolicy(ExecuteSnapshotLifecyclePolicyRequest executeSnapshotLifecyclePolicyRequest) { - Request request = new Request(HttpPut.METHOD_NAME, + Request request = new Request(HttpPost.METHOD_NAME, new RequestConverters.EndpointBuilder() .addPathPartAsIs("_slm/policy") .addPathPartAsIs(executeSnapshotLifecyclePolicyRequest.getPolicyId()) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java index 62f041d6c680..ba8598370453 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java @@ -544,8 +544,7 @@ final class IndicesRequestConverters { static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException { String[] indices = validateQueryRequest.indices() == null ? 
Strings.EMPTY_ARRAY : validateQueryRequest.indices(); - String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types(); - String endpoint = RequestConverters.endpoint(indices, types, "_validate/query"); + String endpoint = RequestConverters.endpoint(indices, "_validate/query"); Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); params.withIndicesOptions(validateQueryRequest.indicesOptions()); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index e42d0f30b4ce..0bf14d07c2e9 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -504,9 +504,7 @@ final class RequestConverters { } static Request explain(ExplainRequest explainRequest) throws IOException { - String endpoint = explainRequest.type().equals(MapperService.SINGLE_MAPPING_NAME) - ? endpoint(explainRequest.index(), "_explain", explainRequest.id()) - : endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain"); + String endpoint = endpoint(explainRequest.index(), "_explain", explainRequest.id()); Request request = new Request(HttpGet.METHOD_NAME, endpoint); Params params = new Params(); @@ -547,6 +545,10 @@ final class RequestConverters { return prepareReindexRequest(reindexRequest, false); } + static Request submitDeleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException { + return prepareDeleteByQueryRequest(deleteByQueryRequest, false); + } + private static Request prepareReindexRequest(ReindexRequest reindexRequest, boolean waitForCompletion) throws IOException { String endpoint = new EndpointBuilder().addPathPart("_reindex").build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); @@ -566,6 +568,35 @@ final class RequestConverters { return request; } + private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest, + boolean waitForCompletion) throws IOException { + String endpoint = endpoint(deleteByQueryRequest.indices(), "_delete_by_query"); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + Params params = new Params() + .withRouting(deleteByQueryRequest.getRouting()) + .withRefresh(deleteByQueryRequest.isRefresh()) + .withTimeout(deleteByQueryRequest.getTimeout()) + .withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards()) + .withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond()) + .withIndicesOptions(deleteByQueryRequest.indicesOptions()) + .withWaitForCompletion(waitForCompletion); + if (deleteByQueryRequest.isAbortOnVersionConflict() == false) { + params.putParam("conflicts", "proceed"); + } + if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) { + params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize())); + } + if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) { + params.putParam("scroll", deleteByQueryRequest.getScrollTime()); + } + if (deleteByQueryRequest.getMaxDocs() > 0) { + params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs())); + } + request.addParameters(params.asMap()); + request.setEntity(createEntity(deleteByQueryRequest, 
REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request updateByQuery(UpdateByQueryRequest updateByQueryRequest) throws IOException { String endpoint = endpoint(updateByQueryRequest.indices(), "_update_by_query"); Request request = new Request(HttpPost.METHOD_NAME, endpoint); @@ -595,30 +626,7 @@ final class RequestConverters { } static Request deleteByQuery(DeleteByQueryRequest deleteByQueryRequest) throws IOException { - String endpoint = endpoint(deleteByQueryRequest.indices(), "_delete_by_query"); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - Params params = new Params() - .withRouting(deleteByQueryRequest.getRouting()) - .withRefresh(deleteByQueryRequest.isRefresh()) - .withTimeout(deleteByQueryRequest.getTimeout()) - .withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards()) - .withRequestsPerSecond(deleteByQueryRequest.getRequestsPerSecond()) - .withIndicesOptions(deleteByQueryRequest.indicesOptions()); - if (deleteByQueryRequest.isAbortOnVersionConflict() == false) { - params.putParam("conflicts", "proceed"); - } - if (deleteByQueryRequest.getBatchSize() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE) { - params.putParam("scroll_size", Integer.toString(deleteByQueryRequest.getBatchSize())); - } - if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) { - params.putParam("scroll", deleteByQueryRequest.getScrollTime()); - } - if (deleteByQueryRequest.getMaxDocs() > 0) { - params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs())); - } - request.addParameters(params.asMap()); - request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; + return prepareDeleteByQueryRequest(deleteByQueryRequest, true); } static Request rethrottleReindex(RethrottleRequest rethrottleRequest) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 093d717389f1..3986d4e8f13d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -256,7 +256,7 @@ public class RestHighLevelClient implements Closeable { private final IndexLifecycleClient ilmClient = new IndexLifecycleClient(this); private final RollupClient rollupClient = new RollupClient(this); private final CcrClient ccrClient = new CcrClient(this); - private final DataFrameClient dataFrameClient = new DataFrameClient(this); + private final TransformClient transformClient = new TransformClient(this); private final EnrichClient enrichClient = new EnrichClient(this); /** @@ -478,8 +478,8 @@ public class RestHighLevelClient implements Closeable { * * @return the client wrapper for making Data Frame API calls */ - public DataFrameClient dataFrame() { - return dataFrameClient; + public TransformClient transform() { + return transformClient; } public EnrichClient enrich() { @@ -595,6 +595,21 @@ public class RestHighLevelClient implements Closeable { ); } + /** + * Submits a delete by query task + * See + * Delete By Query API on elastic.co + * @param deleteByQueryRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the submission response + */ + public final TaskSubmissionResponse submitDeleteByQueryTask(DeleteByQueryRequest deleteByQueryRequest, + RequestOptions options) throws IOException { + return performRequestAndParseEntity( + deleteByQueryRequest, RequestConverters::submitDeleteByQuery, options, TaskSubmissionResponse::fromXContent, emptySet() + ); + } + /** * Asynchronously executes a delete by query request. * See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TransformClient.java similarity index 65% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameClient.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/TransformClient.java index e56bd4422928..9b18fbc06af0 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/TransformClient.java @@ -21,29 +21,29 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.core.AcknowledgedResponse; -import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformResponse; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse; -import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest; -import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformResponse; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformResponse; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformResponse; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.GetTransformStatsResponse; +import org.elasticsearch.client.transform.PreviewTransformRequest; +import org.elasticsearch.client.transform.PreviewTransformResponse; +import org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StartTransformResponse; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.StopTransformResponse; +import org.elasticsearch.client.transform.UpdateTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformResponse; import java.io.IOException; import java.util.Collections; -public final class DataFrameClient { +public final class TransformClient { private final RestHighLevelClient restHighLevelClient; - DataFrameClient(RestHighLevelClient restHighLevelClient) { + 
TransformClient(RestHighLevelClient restHighLevelClient) { this.restHighLevelClient = restHighLevelClient; } @@ -54,15 +54,15 @@ public final class DataFrameClient { * see * Create transform documentation * - * @param request The PutDataFrameTransformRequest containing the - * {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfig}. + * @param request The PutTransformRequest containing the + * {@link org.elasticsearch.client.transform.transforms.TransformConfig}. * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return An AcknowledgedResponse object indicating request success * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public AcknowledgedResponse putDataFrameTransform(PutDataFrameTransformRequest request, RequestOptions options) throws IOException { + public AcknowledgedResponse putTransform(PutTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::putDataFrameTransform, + TransformRequestConverters::putTransform, options, AcknowledgedResponse::fromXContent, Collections.emptySet()); @@ -74,16 +74,16 @@ public final class DataFrameClient { * For additional info * see * Create transform documentation - * @param request The PutDataFrameTransformRequest containing the - * {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfig}. + * @param request The PutTransformRequest containing the + * {@link org.elasticsearch.client.transform.transforms.TransformConfig}. * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable putDataFrameTransformAsync(PutDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable putTransformAsync(PutTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::putDataFrameTransform, + TransformRequestConverters::putTransform, options, AcknowledgedResponse::fromXContent, listener, @@ -97,18 +97,18 @@ public final class DataFrameClient { * see * Create transform documentation * - * @param request The UpdateDataFrameTransformRequest containing the - * {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate}. + * @param request The UpdateTransformRequest containing the + * {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}. * @param options Additional request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return An UpdateDataFrameTransformResponse object containing the updated configuration + * @return An UpdateTransformResponse object containing the updated configuration * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public UpdateDataFrameTransformResponse updateDataFrameTransform(UpdateDataFrameTransformRequest request, - RequestOptions options) throws IOException { + public UpdateTransformResponse updateTransform(UpdateTransformRequest request, + RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::updateDataFrameTransform, + TransformRequestConverters::updateTransform, options, - UpdateDataFrameTransformResponse::fromXContent, + UpdateTransformResponse::fromXContent, Collections.emptySet()); } @@ -118,19 +118,19 @@ public final class DataFrameClient { * For additional info * see * Create transform documentation - * @param request The UpdateDataFrameTransformRequest containing the - * {@link org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate}. + * @param request The UpdateTransformRequest containing the + * {@link org.elasticsearch.client.transform.transforms.TransformConfigUpdate}. * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable updateDataFrameTransformAsync(UpdateDataFrameTransformRequest request, - RequestOptions options, - ActionListener listener) { + public Cancellable updateTransformAsync(UpdateTransformRequest request, + RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::updateDataFrameTransform, + TransformRequestConverters::updateTransform, options, - UpdateDataFrameTransformResponse::fromXContent, + UpdateTransformResponse::fromXContent, listener, Collections.emptySet()); } @@ -142,17 +142,17 @@ public final class DataFrameClient { * see * Get transform stats documentation * - * @param request Specifies the which transforms to get the stats for + * @param request Specifies which transforms to get the stats for * @param options Additional request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return The transform stats * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public GetDataFrameTransformStatsResponse getDataFrameTransformStats(GetDataFrameTransformStatsRequest request, RequestOptions options) + public GetTransformStatsResponse getTransformStats(GetTransformStatsRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::getDataFrameTransformStats, + TransformRequestConverters::getTransformStats, options, - GetDataFrameTransformStatsResponse::fromXContent, + GetTransformStatsResponse::fromXContent, Collections.emptySet()); } @@ -162,17 +162,17 @@ public final class DataFrameClient { * For additional info * see * Get transform stats documentation - * @param request Specifies the which transforms to get the stats for + * @param request Specifies which transforms to get the stats for * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable getDataFrameTransformStatsAsync(GetDataFrameTransformStatsRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable getTransformStatsAsync(GetTransformStatsRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::getDataFrameTransformStats, + TransformRequestConverters::getTransformStats, options, - GetDataFrameTransformStatsResponse::fromXContent, + GetTransformStatsResponse::fromXContent, listener, Collections.emptySet()); } @@ -189,10 +189,10 @@ public final class DataFrameClient { * @return An AcknowledgedResponse object indicating request success * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public AcknowledgedResponse deleteDataFrameTransform(DeleteDataFrameTransformRequest request, RequestOptions options) + public AcknowledgedResponse deleteTransform(DeleteTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::deleteDataFrameTransform, + TransformRequestConverters::deleteTransform, options, AcknowledgedResponse::fromXContent, Collections.emptySet()); @@ -209,10 +209,10 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable deleteDataFrameTransformAsync(DeleteDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable deleteTransformAsync(DeleteTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::deleteDataFrameTransform, + TransformRequestConverters::deleteTransform, options, AcknowledgedResponse::fromXContent, listener, @@ -231,12 +231,12 @@ public final class DataFrameClient { * @return A response containing the results of the applied transform * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public 
PreviewDataFrameTransformResponse previewDataFrameTransform(PreviewDataFrameTransformRequest request, RequestOptions options) + public PreviewTransformResponse previewTransform(PreviewTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::previewDataFrameTransform, + TransformRequestConverters::previewTransform, options, - PreviewDataFrameTransformResponse::fromXContent, + PreviewTransformResponse::fromXContent, Collections.emptySet()); } @@ -250,12 +250,12 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable previewDataFrameTransformAsync(PreviewDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable previewTransformAsync(PreviewTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::previewDataFrameTransform, + TransformRequestConverters::previewTransform, options, - PreviewDataFrameTransformResponse::fromXContent, + PreviewTransformResponse::fromXContent, listener, Collections.emptySet()); } @@ -272,12 +272,12 @@ public final class DataFrameClient { * @return A response object indicating request success * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public StartDataFrameTransformResponse startDataFrameTransform(StartDataFrameTransformRequest request, RequestOptions options) + public StartTransformResponse startTransform(StartTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::startDataFrameTransform, + TransformRequestConverters::startTransform, options, - StartDataFrameTransformResponse::fromXContent, + StartTransformResponse::fromXContent, Collections.emptySet()); } @@ -292,12 +292,12 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable startDataFrameTransformAsync(StartDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable startTransformAsync(StartTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::startDataFrameTransform, + TransformRequestConverters::startTransform, options, - StartDataFrameTransformResponse::fromXContent, + StartTransformResponse::fromXContent, listener, Collections.emptySet()); } @@ -314,12 +314,12 @@ public final class DataFrameClient { * @return A response object indicating request success * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public StopDataFrameTransformResponse stopDataFrameTransform(StopDataFrameTransformRequest request, RequestOptions options) + public StopTransformResponse stopTransform(StopTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::stopDataFrameTransform, + TransformRequestConverters::stopTransform, options, - StopDataFrameTransformResponse::fromXContent, + 
StopTransformResponse::fromXContent, Collections.emptySet()); } @@ -334,12 +334,12 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable stopDataFrameTransformAsync(StopDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable stopTransformAsync(StopTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::stopDataFrameTransform, + TransformRequestConverters::stopTransform, options, - StopDataFrameTransformResponse::fromXContent, + StopTransformResponse::fromXContent, listener, Collections.emptySet()); } @@ -353,15 +353,15 @@ public final class DataFrameClient { * * @param request The get transform request * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return An GetDataFrameTransformResponse containing the requested transforms + * @return A GetTransformResponse containing the requested transforms * @throws IOException when there is a serialization issue sending the request or receiving the response */ - public GetDataFrameTransformResponse getDataFrameTransform(GetDataFrameTransformRequest request, RequestOptions options) + public GetTransformResponse getTransform(GetTransformRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, - DataFrameRequestConverters::getDataFrameTransform, + TransformRequestConverters::getTransform, options, - GetDataFrameTransformResponse::fromXContent, + GetTransformResponse::fromXContent, Collections.emptySet()); } @@ -376,12 +376,12 @@ public final class DataFrameClient { * @param listener Listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable getDataFrameTransformAsync(GetDataFrameTransformRequest request, RequestOptions options, - ActionListener listener) { + public Cancellable getTransformAsync(GetTransformRequest request, RequestOptions options, + ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity(request, - DataFrameRequestConverters::getDataFrameTransform, + TransformRequestConverters::getTransform, options, - GetDataFrameTransformResponse::fromXContent, + GetTransformResponse::fromXContent, listener, Collections.emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TransformRequestConverters.java similarity index 79% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/TransformRequestConverters.java index ab8c79a1784b..49d347ae2b6e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/DataFrameRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/TransformRequestConverters.java @@ -24,29 +24,29 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import
org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.PreviewTransformRequest; +import org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformRequest; import org.elasticsearch.common.Strings; import java.io.IOException; import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; import static org.elasticsearch.client.RequestConverters.createEntity; -import static org.elasticsearch.client.transform.DeleteDataFrameTransformRequest.FORCE; -import static org.elasticsearch.client.transform.GetDataFrameTransformRequest.ALLOW_NO_MATCH; -import static org.elasticsearch.client.transform.PutDataFrameTransformRequest.DEFER_VALIDATION; +import static org.elasticsearch.client.transform.DeleteTransformRequest.FORCE; +import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH; +import static org.elasticsearch.client.transform.PutTransformRequest.DEFER_VALIDATION; -final class DataFrameRequestConverters { +final class TransformRequestConverters { - private DataFrameRequestConverters() {} + private TransformRequestConverters() {} - static Request putDataFrameTransform(PutDataFrameTransformRequest putRequest) throws IOException { + static Request putTransform(PutTransformRequest putRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(putRequest.getConfig().getId()) @@ -59,7 +59,7 @@ final class DataFrameRequestConverters { return request; } - static Request updateDataFrameTransform(UpdateDataFrameTransformRequest updateDataFrameTransformRequest) throws IOException { + static Request updateTransform(UpdateTransformRequest updateDataFrameTransformRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(updateDataFrameTransformRequest.getId()) @@ -73,7 +73,7 @@ final class DataFrameRequestConverters { return request; } - static Request getDataFrameTransform(GetDataFrameTransformRequest getRequest) { + static Request getTransform(GetTransformRequest getRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getId())) @@ -91,7 +91,7 @@ final class DataFrameRequestConverters { return request; } - static Request deleteDataFrameTransform(DeleteDataFrameTransformRequest deleteRequest) { + static Request deleteTransform(DeleteTransformRequest deleteRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") 
.addPathPart(deleteRequest.getId()) @@ -103,7 +103,7 @@ final class DataFrameRequestConverters { return request; } - static Request startDataFrameTransform(StartDataFrameTransformRequest startRequest) { + static Request startTransform(StartTransformRequest startRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(startRequest.getId()) @@ -118,7 +118,7 @@ final class DataFrameRequestConverters { return request; } - static Request stopDataFrameTransform(StopDataFrameTransformRequest stopRequest) { + static Request stopTransform(StopTransformRequest stopRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(stopRequest.getId()) @@ -139,7 +139,7 @@ final class DataFrameRequestConverters { return request; } - static Request previewDataFrameTransform(PreviewDataFrameTransformRequest previewRequest) throws IOException { + static Request previewTransform(PreviewTransformRequest previewRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms", "_preview") .build(); @@ -148,7 +148,7 @@ final class DataFrameRequestConverters { return request; } - static Request getDataFrameTransformStats(GetDataFrameTransformStatsRequest statsRequest) { + static Request getTransformStats(GetTransformStatsRequest statsRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_data_frame", "transforms") .addPathPart(statsRequest.getId()) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/CountRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/CountRequest.java index 95ab3db7e71f..2d42b56bc818 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/CountRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/CountRequest.java @@ -19,10 +19,8 @@ package org.elasticsearch.client.core; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.Validatable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,7 +37,7 @@ import static org.elasticsearch.action.search.SearchRequest.DEFAULT_INDICES_OPTI /** * Encapsulates a request to _count API against one, several or all indices. */ -public final class CountRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject { +public final class CountRequest implements Validatable, ToXContentObject { private String[] indices = Strings.EMPTY_ARRAY; private String[] types = Strings.EMPTY_ARRAY; @@ -78,11 +76,6 @@ public final class CountRequest extends ActionRequest implements IndicesRequest. this.query = Objects.requireNonNull(query, "query must not be null");; } - @Override - public ActionRequestValidationException validate() { - return null; - } - /** * Sets the indices the count will be executed on. 
*/ diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/graph/GraphExploreRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/graph/GraphExploreRequest.java index 5a0a4c2a9208..c3230a75a58d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/graph/GraphExploreRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/graph/GraphExploreRequest.java @@ -47,7 +47,6 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest"; private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); - private String[] types = Strings.EMPTY_ARRAY; private String routing; private TimeValue timeout; @@ -106,31 +105,6 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte return this; } - /** - * The document types to execute the explore against. Defaults to be executed against - * all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public String[] types() { - return this.types; - } - - /** - * The document types to execute the explore request against. Defaults to be executed against - * all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public GraphExploreRequest types(String... types) { - this.types = types; - return this; - } - public String routing() { return this.routing; } @@ -154,7 +128,7 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte * operations involved in each hop are limited to the remaining time * available but can still overrun due to the nature of their "best efforts" * timeout support. When a timeout occurs partial results are returned. - * + * * @param timeout * a {@link TimeValue} object which determines the maximum length * of time to spend exploring @@ -174,7 +148,7 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte @Override public String toString() { - return "graph explore [" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "]"; + return "graph explore [" + Arrays.toString(indices) + "]"; } /** @@ -190,7 +164,7 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte * better with smaller samples as there are less look-ups required for * background frequencies of terms found in the documents *

- * + * * @param maxNumberOfDocsPerHop * shard-level sample size in documents */ @@ -231,7 +205,7 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte * default value is true which means terms are selected based on * significance (see the {@link SignificantTerms} aggregation) rather than * popularity (using the {@link TermsAggregator}). - * + * * @param value * true if the significant_terms algorithm should be used. */ @@ -246,7 +220,7 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte /** * Return detailed information about vertex frequencies as part of JSON * results - defaults to false - * + * * @param value * true if detailed information is required in JSON responses */ @@ -262,7 +236,7 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte * Add a stage in the graph exploration. Each hop represents a stage of * querying elasticsearch to identify terms which can then be connnected to * other terms in a subsequent hop. - * + * * @param guidingQuery * optional choice of query which influences which documents are * considered in this stage @@ -316,7 +290,7 @@ public class GraphExploreRequest implements IndicesRequest.Replaceable, ToXConte @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - + builder.startObject("controls"); { if (sampleSize != SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java index 36cf8afad0d5..0696ac7b5554 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java @@ -113,8 +113,6 @@ public class DetailAnalyzeResponse { private final String name; private final AnalyzeResponse.AnalyzeToken[] tokens; - private static final String TOKENS = "tokens"; - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java index 7008a719b7b3..2d03a08d2d42 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/PutIndexTemplateRequest.java @@ -20,15 +20,13 @@ package org.elasticsearch.client.indices; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.client.TimedRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.DeprecationHandler; import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -50,13 +48,12 @@ import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; -import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; /** * A request to create an index template. */ -public class PutIndexTemplateRequest extends MasterNodeRequest implements IndicesRequest, ToXContentFragment { +public class PutIndexTemplateRequest extends TimedRequest implements ToXContentFragment { private String name; @@ -77,19 +74,11 @@ public class PutIndexTemplateRequest extends MasterNodeRequest indexPatterns) { this.name(name); - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (indexPatterns == null || indexPatterns.size() == 0) { - validationException = addValidationError("index patterns are missing", validationException); - } - return validationException; + this.patterns(indexPatterns); } /** @@ -111,6 +100,9 @@ public class PutIndexTemplateRequest extends MasterNodeRequest indexPatterns) { + if (indexPatterns == null || indexPatterns.size() == 0) { + throw new IllegalArgumentException("index patterns are missing"); + } this.indexPatterns = indexPatterns; return this; } @@ -412,14 +404,21 @@ public class PutIndexTemplateRequest extends MasterNodeRequest jobIds; private final String calendarId; @@ -61,11 +60,6 @@ public class DeleteCalendarJobRequest extends ActionRequest { return calendarId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(jobIds, calendarId); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java index 047561685fbf..4a1d7c11e16c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java @@ -19,15 +19,14 @@ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import java.util.Objects; /** * Request to delete a Machine Learning Calendar */ -public class DeleteCalendarRequest extends ActionRequest { +public class DeleteCalendarRequest implements Validatable { private final String calendarId; @@ -43,11 +42,6 @@ public class DeleteCalendarRequest extends ActionRequest { return calendarId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(calendarId); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java index 9782b73b5795..11cfc7f3e914 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java @@ -18,15 +18,14 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import 
org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import java.util.Objects; /** * Request to delete a Machine Learning Datafeed via its ID */ -public class DeleteDatafeedRequest extends ActionRequest { +public class DeleteDatafeedRequest implements Validatable { private String datafeedId; private Boolean force; @@ -53,11 +52,6 @@ public class DeleteDatafeedRequest extends ActionRequest { this.force = force; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(datafeedId, force); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java index 25e340a8bab1..4da2d52ee357 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java @@ -18,13 +18,12 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; /** * Request to delete expired model snapshots and forecasts */ -public class DeleteExpiredDataRequest extends ActionRequest { +public class DeleteExpiredDataRequest implements Validatable { /** * Create a new request to delete expired data @@ -32,8 +31,4 @@ public class DeleteExpiredDataRequest extends ActionRequest { public DeleteExpiredDataRequest() { } - @Override - public ActionRequestValidationException validate() { - return null; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java index 9c6bccd191c1..e961c5791d4d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -37,7 +36,7 @@ import java.util.Objects; /** * POJO for a delete forecast request */ -public class DeleteForecastRequest extends ActionRequest implements ToXContentObject { +public class DeleteForecastRequest implements Validatable, ToXContentObject { public static final ParseField FORECAST_ID = new ParseField("forecast_id"); public static final ParseField ALLOW_NO_FORECASTS = new ParseField("allow_no_forecasts"); @@ -159,11 +158,6 @@ public class DeleteForecastRequest extends ActionRequest implements ToXContentOb return Objects.hash(jobId, forecastIds, allowNoForecasts, timeout); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java index 
44e3668059c4..2b7258270053 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java @@ -18,15 +18,14 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import java.util.Objects; /** * Request to delete a Machine Learning Job via its ID */ -public class DeleteJobRequest extends ActionRequest { +public class DeleteJobRequest implements Validatable { private String jobId; private Boolean force; @@ -76,11 +75,6 @@ public class DeleteJobRequest extends ActionRequest { this.waitForCompletion = waitForCompletion; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(jobId, force); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java index 1c153e3555b1..081560fb03eb 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java @@ -18,18 +18,16 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.client.ml.job.process.ModelSnapshot; - import java.util.Objects; /** * Request to delete a Machine Learning Model Snapshot Job via its Job and Snapshot IDs */ -public class DeleteModelSnapshotRequest extends ActionRequest { +public class DeleteModelSnapshotRequest implements Validatable { private final String jobId; private final String snapshotId; @@ -47,11 +45,6 @@ public class DeleteModelSnapshotRequest extends ActionRequest { return snapshotId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(jobId, snapshotId); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java index 067851d45266..8436eb67be81 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -32,7 +31,7 @@ import java.util.Objects; /** * Request object to flush a given Machine Learning job. 
*/ -public class FlushJobRequest extends ActionRequest implements ToXContentObject { +public class FlushJobRequest implements Validatable, ToXContentObject { public static final ParseField CALC_INTERIM = new ParseField("calc_interim"); public static final ParseField START = new ParseField("start"); @@ -188,8 +187,4 @@ public class FlushJobRequest extends ActionRequest implements ToXContentObject { return builder; } - @Override - public ActionRequestValidationException validate() { - return null; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java index 67d290c37f08..b9d2ceca43b5 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.unit.TimeValue; @@ -34,7 +33,7 @@ import java.util.Objects; /** * Pojo for forecasting an existing and open Machine Learning Job */ -public class ForecastJobRequest extends ActionRequest implements ToXContentObject { +public class ForecastJobRequest implements Validatable, ToXContentObject { public static final ParseField DURATION = new ParseField("duration"); public static final ParseField EXPIRES_IN = new ParseField("expires_in"); @@ -133,8 +132,4 @@ public class ForecastJobRequest extends ActionRequest implements ToXContentObjec return builder; } - @Override - public ActionRequestValidationException validate() { - return null; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java index 655ecb2b9750..1aa2c3a7755f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.client.ml.job.results.Result; @@ -34,7 +33,7 @@ import java.util.Objects; /** * A request to retrieve buckets of a given job */ -public class GetBucketsRequest extends ActionRequest implements ToXContentObject { +public class GetBucketsRequest implements Validatable, ToXContentObject { public static final ParseField EXPAND = new ParseField("expand"); public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); @@ -199,11 +198,6 @@ public class GetBucketsRequest extends ActionRequest implements ToXContentObject this.descending = descending; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java index 05bc234178eb..622ee51eb9e4 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.job.config.Job; @@ -35,7 +34,7 @@ import java.util.Objects; /** * Get the Scheduled Events for a Calendar */ -public class GetCalendarEventsRequest extends ActionRequest implements ToXContentObject { +public class GetCalendarEventsRequest implements Validatable, ToXContentObject { public static final ParseField START = new ParseField("start"); public static final ParseField END = new ParseField("end"); @@ -121,11 +120,6 @@ public class GetCalendarEventsRequest extends ActionRequest implements ToXConten this.jobId = jobId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java index bcbae91693b9..defa3943c5d8 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.common.xcontent.ObjectParser; @@ -30,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; -public class GetCalendarsRequest extends ActionRequest implements ToXContentObject { +public class GetCalendarsRequest implements Validatable, ToXContentObject { public static final ObjectParser PARSER = new ObjectParser<>("get_calendars_request", GetCalendarsRequest::new); @@ -66,11 +65,6 @@ public class GetCalendarsRequest extends ActionRequest implements ToXContentObje this.pageParams = pageParams; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java index b1000c3e4eb4..f1d148c8ee39 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java @@ -18,8 +18,7 @@ */ package 
org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; @@ -33,7 +32,7 @@ import java.util.Objects; /** * A request to retrieve categories of a given job */ -public class GetCategoriesRequest extends ActionRequest implements ToXContentObject { +public class GetCategoriesRequest implements Validatable, ToXContentObject { public static final ParseField CATEGORY_ID = new ParseField("category_id"); @@ -88,11 +87,6 @@ public class GetCategoriesRequest extends ActionRequest implements ToXContentObj this.pageParams = pageParams; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java index d8b88e026ccb..ab827b64c2d2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -38,7 +37,7 @@ import java.util.Objects; * {@code _all} explicitly gets all the datafeeds in the cluster * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds in the cluster */ -public class GetDatafeedRequest extends ActionRequest implements ToXContentObject { +public class GetDatafeedRequest implements Validatable, ToXContentObject { public static final ParseField DATAFEED_IDS = new ParseField("datafeed_ids"); public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds"); @@ -101,11 +100,6 @@ public class GetDatafeedRequest extends ActionRequest implements ToXContentObjec return allowNoDatafeeds; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(datafeedIds, allowNoDatafeeds); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java index a3814d6c90c9..a44d6bf93c02 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -41,7 +40,7 @@ import java.util.Objects; * {@code _all} explicitly gets 
all the datafeeds' statistics in the cluster * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds' statistics in the cluster */ -public class GetDatafeedStatsRequest extends ActionRequest implements ToXContentObject { +public class GetDatafeedStatsRequest implements Validatable, ToXContentObject { public static final ParseField ALLOW_NO_DATAFEEDS = new ParseField("allow_no_datafeeds"); @@ -128,11 +127,6 @@ public class GetDatafeedStatsRequest extends ActionRequest implements ToXContent Objects.equals(allowNoDatafeeds, that.allowNoDatafeeds); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java index a1808af23fca..98c7f9d54d0c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.MlFilter; import org.elasticsearch.common.xcontent.ObjectParser; @@ -32,7 +31,7 @@ import java.util.Objects; /** * A request to retrieve {@link MlFilter}s */ -public class GetFiltersRequest extends ActionRequest implements ToXContentObject { +public class GetFiltersRequest implements Validatable, ToXContentObject { public static final ObjectParser PARSER = new ObjectParser<>("get_filters_request", GetFiltersRequest::new); @@ -83,11 +82,6 @@ public class GetFiltersRequest extends ActionRequest implements ToXContentObject this.size = size; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java index 8e5a45e7ba38..0639fd9574c7 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; @@ -33,7 +32,7 @@ import java.util.Objects; /** * A request to retrieve influencers of a given job */ -public class GetInfluencersRequest extends ActionRequest implements ToXContentObject { +public class GetInfluencersRequest implements Validatable, ToXContentObject { public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); public static final ParseField START = new ParseField("start"); @@ -167,11 +166,6 @@ public class 
GetInfluencersRequest extends ActionRequest implements ToXContentOb this.descending = descending; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java index 4d4c67b511bf..24684cd99c6f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -39,7 +38,7 @@ import java.util.Objects; * {@code _all} explicitly gets all the jobs in the cluster * An empty request (no {@code jobId}s) implicitly gets all the jobs in the cluster */ -public class GetJobRequest extends ActionRequest implements ToXContentObject { +public class GetJobRequest implements Validatable, ToXContentObject { public static final ParseField JOB_IDS = new ParseField("job_ids"); public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs"); @@ -101,11 +100,6 @@ public class GetJobRequest extends ActionRequest implements ToXContentObject { return allowNoJobs; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(jobIds, allowNoJobs); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java index 493e2bf5272e..d33972babb57 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -41,7 +40,7 @@ import java.util.Objects; * {@code _all} explicitly gets all the jobs' statistics in the cluster * An empty request (no {@code jobId}s) implicitly gets all the jobs' statistics in the cluster */ -public class GetJobStatsRequest extends ActionRequest implements ToXContentObject { +public class GetJobStatsRequest implements Validatable, ToXContentObject { public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs"); @@ -128,11 +127,6 @@ public class GetJobStatsRequest extends ActionRequest implements ToXContentObjec Objects.equals(allowNoJobs, that.allowNoJobs); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java index acb138ac442f..7e11073cbc26 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; @@ -33,7 +32,7 @@ import java.util.Objects; /** * A request to retrieve information about model snapshots for a given job */ -public class GetModelSnapshotsRequest extends ActionRequest implements ToXContentObject { +public class GetModelSnapshotsRequest implements Validatable, ToXContentObject { public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); @@ -153,11 +152,6 @@ public class GetModelSnapshotsRequest extends ActionRequest implements ToXConten this.desc = desc; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java index 36c0fe250e25..f34dcb3be8aa 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -37,7 +36,7 @@ import java.util.Objects; /** * A request to retrieve overall buckets of set of jobs */ -public class GetOverallBucketsRequest extends ActionRequest implements ToXContentObject { +public class GetOverallBucketsRequest implements Validatable, ToXContentObject { public static final ParseField TOP_N = new ParseField("top_n"); public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); @@ -203,11 +202,6 @@ public class GetOverallBucketsRequest extends ActionRequest implements ToXConten return allowNoJobs; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java index 5b8e68cd72dc..ae9489bce137 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import 
org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -35,7 +34,7 @@ import java.util.Objects; /** * Request to open a Machine Learning Job */ -public class OpenJobRequest extends ActionRequest implements ToXContentObject { +public class OpenJobRequest implements Validatable, ToXContentObject { public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( @@ -88,11 +87,6 @@ public class OpenJobRequest extends ActionRequest implements ToXContentObject { this.timeout = timeout; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java index 2c43ec9ab776..943703414583 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.calendars.ScheduledEvent; import org.elasticsearch.common.ParseField; @@ -35,7 +34,7 @@ import java.util.Objects; /** * Request to add a ScheduledEvent to a Machine Learning calendar */ -public class PostCalendarEventRequest extends ActionRequest implements ToXContentObject { +public class PostCalendarEventRequest implements Validatable, ToXContentObject { private final String calendarId; private final List scheduledEvents; @@ -78,11 +77,6 @@ public class PostCalendarEventRequest extends ActionRequest implements ToXConten return scheduledEvents; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java index 519ac5e00510..e81464bc4282 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; @@ -40,7 +39,7 @@ import java.util.Objects; /** * Request to post data to a Machine Learning job */ -public class PostDataRequest extends ActionRequest implements ToXContentObject { +public class PostDataRequest implements 
Validatable, ToXContentObject { public static final ParseField RESET_START = new ParseField("reset_start"); public static final ParseField RESET_END = new ParseField("reset_end"); @@ -159,11 +158,6 @@ public class PostDataRequest extends ActionRequest implements ToXContentObject { Objects.equals(xContentType, other.xContentType); } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java index a21e96b46423..8ec2df177d50 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -31,9 +30,9 @@ import java.io.IOException; import java.util.Objects; /** - * Request to preview a MachineLearning Datafeed + * Request to preview a Machine Learning Datafeed */ -public class PreviewDatafeedRequest extends ActionRequest implements ToXContentObject { +public class PreviewDatafeedRequest implements Validatable, ToXContentObject { public static final ConstructingObjectParser<PreviewDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>( "open_datafeed_request", true, a -> new PreviewDatafeedRequest((String) a[0])); @@ -61,11 +60,6 @@ public class PreviewDatafeedRequest extends ActionRequest implements ToXContentO return datafeedId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java index 5ec379868934..52fbee411651 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import java.security.InvalidParameterException; import java.util.Arrays; @@ -29,7 +28,7 @@ import java.util.Objects; /** * Request class for adding Machine Learning Jobs to an existing calendar */ -public class PutCalendarJobRequest extends ActionRequest { +public class PutCalendarJobRequest implements Validatable { private final List<String> jobIds; private final String calendarId; @@ -61,11 +60,6 @@ public class PutCalendarJobRequest extends ActionRequest { return calendarId; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(jobIds, calendarId); diff --git
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java index 56d6b2b545b5..45f650b9cbe3 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java @@ -19,8 +19,7 @@ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -31,7 +30,7 @@ import java.util.Objects; /** * Request to create a new Machine Learning calendar */ -public class PutCalendarRequest extends ActionRequest implements ToXContentObject { +public class PutCalendarRequest implements Validatable, ToXContentObject { private final Calendar calendar; @@ -43,11 +42,6 @@ public class PutCalendarRequest extends ActionRequest implements ToXContentObjec return calendar; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { calendar.toXContent(builder, params); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java index 34cb12599a61..16634ec10c69 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -31,7 +30,7 @@ import java.util.Objects; /** * Request to create a new Machine Learning Datafeed given a {@link DatafeedConfig} configuration */ -public class PutDatafeedRequest extends ActionRequest implements ToXContentObject { +public class PutDatafeedRequest implements Validatable, ToXContentObject { private final DatafeedConfig datafeed; @@ -77,8 +76,4 @@ public class PutDatafeedRequest extends ActionRequest implements ToXContentObjec return Strings.toString(this); } - @Override - public ActionRequestValidationException validate() { - return null; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java index 5414c8625811..2b307b560620 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.MlFilter; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.xcontent.ToXContentObject; @@ -31,7 +30,7 @@ import java.util.Objects; /** * Request to create a new Machine Learning MlFilter given a {@link MlFilter} configuration */ -public class PutFilterRequest extends ActionRequest implements ToXContentObject { +public class PutFilterRequest implements Validatable, ToXContentObject { private final MlFilter filter; @@ -77,8 +76,4 @@ public class PutFilterRequest extends ActionRequest implements ToXContentObject return Strings.toString(this); } - @Override - public ActionRequestValidationException validate() { - return null; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java index de8529de6bb8..9fc393e36fd0 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -31,7 +30,7 @@ import java.util.Objects; /** * Request to create a new Machine Learning Job given a {@link Job} configuration */ -public class PutJobRequest extends ActionRequest implements ToXContentObject { +public class PutJobRequest implements Validatable, ToXContentObject { private final Job job; @@ -77,8 +76,4 @@ public class PutJobRequest extends ActionRequest implements ToXContentObject { return Strings.toString(this); } - @Override - public ActionRequestValidationException validate() { - return null; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java index 3a38cd86a056..2581df06a153 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.client.ml.job.process.ModelSnapshot; import org.elasticsearch.common.ParseField; @@ -33,7 +32,7 @@ import java.util.Objects; /** * A request to revert to a specific model snapshot for a given job */ -public class RevertModelSnapshotRequest extends ActionRequest implements ToXContentObject { +public class RevertModelSnapshotRequest implements Validatable, ToXContentObject { public static final ParseField DELETE_INTERVENING = new ParseField("delete_intervening_results"); @@ -82,11 +81,6 @@ public class RevertModelSnapshotRequest extends ActionRequest implements ToXCont this.deleteInterveningResults = deleteInterveningResults; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java index 64e94f025178..b913ebe29233 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.unit.TimeValue; @@ -28,7 +27,7 @@ import java.util.Objects; /** * Sets ML into upgrade_mode */ -public class SetUpgradeModeRequest extends ActionRequest { +public class SetUpgradeModeRequest implements Validatable { public static final ParseField ENABLED = new ParseField("enabled"); @@ -67,11 +66,6 @@ public class SetUpgradeModeRequest extends ActionRequest { this.timeout = timeout; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(enabled, timeout); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java index 4ee6d747e576..68e93141b01e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.unit.TimeValue; @@ -33,7 +32,7 @@ import java.util.Objects; /** * Request to start a Datafeed */ -public class StartDatafeedRequest extends ActionRequest implements ToXContentObject { +public class StartDatafeedRequest implements Validatable, ToXContentObject { public static final ParseField START = new ParseField("start"); public static final ParseField END = new ParseField("end"); @@ -114,11 +113,6 @@ public class StartDatafeedRequest extends ActionRequest implements ToXContentObj this.timeout = timeout; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(datafeedId, start, end, timeout); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java index 4bba828d68ee..430f24777d4c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -39,7 +38,7 @@ 
import java.util.Objects; /** * Request to stop Machine Learning Datafeeds */ -public class StopDatafeedRequest extends ActionRequest implements ToXContentObject { +public class StopDatafeedRequest implements Validatable, ToXContentObject { public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ParseField FORCE = new ParseField("force"); @@ -144,11 +143,6 @@ public class StopDatafeedRequest extends ActionRequest implements ToXContentObje this.allowNoDatafeeds = allowNoDatafeeds; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public int hashCode() { return Objects.hash(datafeedIds, timeout, force, allowNoDatafeeds); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java index e434c5f9a572..6fd9e6fb41d6 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.datafeed.DatafeedUpdate; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -32,7 +31,7 @@ import java.util.Objects; * Requests an update to a {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} with the passed {@link DatafeedUpdate} * settings */ -public class UpdateDatafeedRequest extends ActionRequest implements ToXContentObject { +public class UpdateDatafeedRequest implements Validatable, ToXContentObject { private final DatafeedUpdate update; @@ -73,8 +72,4 @@ public class UpdateDatafeedRequest extends ActionRequest implements ToXContentOb return Strings.toString(this); } - @Override - public ActionRequestValidationException validate() { - return null; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java index fb7d06a55251..c2e502e6578f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.MlFilter; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -36,7 +35,7 @@ import java.util.TreeSet; /** * Updates an existing {@link MlFilter} configuration */ -public class UpdateFilterRequest extends ActionRequest implements ToXContentObject { +public class UpdateFilterRequest implements Validatable, ToXContentObject { public static final ParseField ADD_ITEMS = new ParseField("add_items"); public static final ParseField REMOVE_ITEMS = new ParseField("remove_items"); @@ -149,8 +148,4 @@ public class UpdateFilterRequest extends ActionRequest implements ToXContentObje return Strings.toString(this); } - @Override - public ActionRequestValidationException validate() { - return null; - } 
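// Editor's note (illustrative, not part of the patch): the deleted validate() overrides
// above and below were pure boilerplate. The client-side Validatable interface that
// replaces ActionRequest reports validation through an Optional and already defaults to
// "no validation errors", so requests with nothing to check can simply drop the override.
// A minimal sketch of that contract, inferred from its use later in this diff
// (DeleteTransformRequest, GetTransformRequest, and friends):
//
//     public interface Validatable {
//         default Optional<ValidationException> validate() {
//             return Optional.empty();
//         }
//     }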
} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java index 6e050f8adcf9..91952b22f2a2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.JobUpdate; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -32,7 +31,7 @@ import java.util.Objects; * Updates a {@link org.elasticsearch.client.ml.job.config.Job} with the passed {@link JobUpdate} * settings */ -public class UpdateJobRequest extends ActionRequest implements ToXContentObject { +public class UpdateJobRequest implements Validatable, ToXContentObject { private final JobUpdate update; @@ -73,8 +72,4 @@ public class UpdateJobRequest extends ActionRequest implements ToXContentObject return Strings.toString(this); } - @Override - public ActionRequestValidationException validate() { - return null; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java index b2b6417ab2ed..bd376e7a4d92 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java @@ -18,8 +18,7 @@ */ package org.elasticsearch.client.ml; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.Validatable; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.client.ml.job.process.ModelSnapshot; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -32,7 +31,7 @@ import java.util.Objects; /** * A request to update information about an existing model snapshot for a given job */ -public class UpdateModelSnapshotRequest extends ActionRequest implements ToXContentObject { +public class UpdateModelSnapshotRequest implements Validatable, ToXContentObject { public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( @@ -93,11 +92,6 @@ public class UpdateModelSnapshotRequest extends ActionRequest implements ToXCont this.retain = retain; } - @Override - public ActionRequestValidationException validate() { - return null; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java new file mode 100644 index 000000000000..be7c3c00af2c --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference; + +import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree; +import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding; +import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding; +import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; +import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.plugins.spi.NamedXContentProvider; + +import java.util.ArrayList; +import java.util.List; + +public class MlInferenceNamedXContentProvider implements NamedXContentProvider { + + @Override + public List<NamedXContentRegistry.Entry> getNamedXContentParsers() { + List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>(); + + // PreProcessing + namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(OneHotEncoding.NAME), + OneHotEncoding::fromXContent)); + namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(TargetMeanEncoding.NAME), + TargetMeanEncoding::fromXContent)); + namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(FrequencyEncoding.NAME), + FrequencyEncoding::fromXContent)); + + // Model + namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Tree.NAME), Tree::fromXContent)); + + return namedXContent; + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java new file mode 100644 index 000000000000..fd0810d613eb --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java @@ -0,0 +1,161 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + + +/** + * PreProcessor for frequency encoding a set of categorical values for a given field. + */ +public class FrequencyEncoding implements PreProcessor { + + public static final String NAME = "frequency_encoding"; + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField FEATURE_NAME = new ParseField("feature_name"); + public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser<FrequencyEncoding, Void> PARSER = new ConstructingObjectParser<>( + NAME, + true, + a -> new FrequencyEncoding((String)a[0], (String)a[1], (Map<String, Double>)a[2])); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), + (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), + FREQUENCY_MAP); + } + + public static FrequencyEncoding fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final String field; + private final String featureName; + private final Map<String, Double> frequencyMap; + + public FrequencyEncoding(String field, String featureName, Map<String, Double> frequencyMap) { + this.field = Objects.requireNonNull(field); + this.featureName = Objects.requireNonNull(featureName); + this.frequencyMap = Collections.unmodifiableMap(Objects.requireNonNull(frequencyMap)); + } + + /** + * @return Field name on which to frequency encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: frequency for the frequency encoding + */ + public Map<String, Double> getFrequencyMap() { + return frequencyMap; + } + + /** + * @return The encoded feature name + */ + public String getFeatureName() { + return featureName; + } + + @Override + public String getName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(FEATURE_NAME.getPreferredName(), featureName); + builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FrequencyEncoding that = (FrequencyEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(featureName, that.featureName) + && Objects.equals(frequencyMap, that.frequencyMap); + } + + @Override + public int hashCode() { + return Objects.hash(field, featureName, frequencyMap); + } + + public static Builder builder(String field) { + return new Builder(field); + } + + public static class Builder { + + private String field; + private String featureName; + private Map<String, Double> frequencyMap = new HashMap<>(); + + public Builder(String field) { + this.field = field; + } + + public Builder setField(String field) { + this.field = field; + return this; + }
+ + public Builder setFeatureName(String featureName) { + this.featureName = featureName; + return this; + } + + public Builder setFrequencyMap(Map<String, Double> frequencyMap) { + this.frequencyMap = new HashMap<>(frequencyMap); + return this; + } + + public Builder addFrequency(String valueName, double frequency) { + this.frequencyMap.put(valueName, frequency); + return this; + } + + public FrequencyEncoding build() { + return new FrequencyEncoding(field, featureName, frequencyMap); + } + } + +}
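// Editor's note (illustrative, not part of the patch): constructing one of these
// preprocessors by hand. "airline" and the map entries are hypothetical; the map
// gives each category's relative frequency, keyed by field value:
//
//     Map<String, Double> frequencies = new HashMap<>();
//     frequencies.put("AAL", 0.75);
//     frequencies.put("AWE", 0.25);
//     FrequencyEncoding encoding = new FrequencyEncoding("airline", "airline_frequency", frequencies);
//
// OneHotEncoding and TargetMeanEncoding below follow the same pattern, differing
// only in the kind of map they carry.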
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java new file mode 100644 index 000000000000..812cd723f994 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * PreProcessor for one hot encoding a set of categorical values for a given field. + */ +public class OneHotEncoding implements PreProcessor { + + public static final String NAME = "one_hot_encoding"; + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField HOT_MAP = new ParseField("hot_map"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser<OneHotEncoding, Void> PARSER = new ConstructingObjectParser<>( + NAME, + true, + a -> new OneHotEncoding((String)a[0], (Map<String, String>)a[1])); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP); + } + + public static OneHotEncoding fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final String field; + private final Map<String, String> hotMap; + + public OneHotEncoding(String field, Map<String, String> hotMap) { + this.field = Objects.requireNonNull(field); + this.hotMap = Collections.unmodifiableMap(Objects.requireNonNull(hotMap)); + } + + /** + * @return Field name on which to one hot encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: ColumnName for the one hot encoding + */ + public Map<String, String> getHotMap() { + return hotMap; + } + + @Override + public String getName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(HOT_MAP.getPreferredName(), hotMap); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OneHotEncoding that = (OneHotEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(hotMap, that.hotMap); + } + + @Override + public int hashCode() { + return Objects.hash(field, hotMap); + } + + public static Builder builder(String field) { + return new Builder(field); + } + + public static class Builder { + + private String field; + private Map<String, String> hotMap = new HashMap<>(); + + public Builder(String field) { + this.field = field; + } + + public Builder setField(String field) { + this.field = field; + return this; + } + + public Builder setHotMap(Map<String, String> hotMap) { + this.hotMap = new HashMap<>(hotMap); + return this; + } + + public Builder addOneHot(String valueName, String oneHotFeatureName) { + this.hotMap.put(valueName, oneHotFeatureName); + return this; + } + + public OneHotEncoding build() { + return new OneHotEncoding(field, hotMap); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java new file mode 100644 index 000000000000..ea814a8a0d61 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java @@ -0,0 +1,33 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.xcontent.ToXContentObject; + + +/** + * Describes a pre-processor for a defined machine learning model + */ +public interface PreProcessor extends ToXContentObject { + + /** + * @return The name of the pre-processor + */ + String getName(); +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java new file mode 100644 index 000000000000..bb29924b98e1 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java @@ -0,0 +1,183 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + + +/** + * PreProcessor for target mean encoding a set of categorical values for a given field. 
+ */ +public class TargetMeanEncoding implements PreProcessor { + + public static final String NAME = "target_mean_encoding"; + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField FEATURE_NAME = new ParseField("feature_name"); + public static final ParseField TARGET_MEANS = new ParseField("target_means"); + public static final ParseField DEFAULT_VALUE = new ParseField("default_value"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser<TargetMeanEncoding, Void> PARSER = new ConstructingObjectParser<>( + NAME, + true, + a -> new TargetMeanEncoding((String)a[0], (String)a[1], (Map<String, Double>)a[2], (Double)a[3])); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), + (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), + TARGET_MEANS); + PARSER.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE); + } + + public static TargetMeanEncoding fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final String field; + private final String featureName; + private final Map<String, Double> meanMap; + private final double defaultValue; + + public TargetMeanEncoding(String field, String featureName, Map<String, Double> meanMap, Double defaultValue) { + this.field = Objects.requireNonNull(field); + this.featureName = Objects.requireNonNull(featureName); + this.meanMap = Collections.unmodifiableMap(Objects.requireNonNull(meanMap)); + this.defaultValue = Objects.requireNonNull(defaultValue); + } + + /** + * @return Field name on which to target mean encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: targetMean for the target mean encoding + */ + public Map<String, Double> getMeanMap() { + return meanMap; + } + + /** + * @return The default value to set when a previously unobserved value is seen + */ + public double getDefaultValue() { + return defaultValue; + } + + /** + * @return The feature name for the encoded value + */ + public String getFeatureName() { + return featureName; + } + + @Override + public String getName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(FEATURE_NAME.getPreferredName(), featureName); + builder.field(TARGET_MEANS.getPreferredName(), meanMap); + builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TargetMeanEncoding that = (TargetMeanEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(featureName, that.featureName) + && Objects.equals(meanMap, that.meanMap) + && Objects.equals(defaultValue, that.defaultValue); + } + + @Override + public int hashCode() { + return Objects.hash(field, featureName, meanMap, defaultValue); + } + + public static Builder builder(String field) { + return new Builder(field); + } + + public static class Builder { + + private String field; + private String featureName; + private Map<String, Double> meanMap = new HashMap<>(); + private double defaultValue; + + public Builder(String field) { + this.field = field; + } + + public String getField() { + return field; + } + + public Builder setField(String 
field) { + this.field = field; + return this; + } + + public Builder setFeatureName(String featureName) { + this.featureName = featureName; + return this; + } + + public Builder setMeanMap(Map<String, Double> meanMap) { + this.meanMap = meanMap; + return this; + } + + public Builder addMeanMapEntry(String valueName, double meanEncoding) { + this.meanMap.put(valueName, meanEncoding); + return this; + } + + public Builder setDefaultValue(double defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public TargetMeanEncoding build() { + return new TargetMeanEncoding(field, featureName, meanMap, defaultValue); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/search/SearchContextException.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java similarity index 57% rename from server/src/main/java/org/elasticsearch/search/SearchContextException.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java index 8f1ebb80139c..fb1f5c3b4ab9 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchContextException.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java @@ -16,26 +16,21 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.client.ml.inference.trainedmodel; -package org.elasticsearch.search; +import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.internal.SearchContext; +import java.util.List; -import java.io.IOException; +public interface TrainedModel extends ToXContentObject { -public class SearchContextException extends SearchException { - - public SearchContextException(SearchContext context, String msg) { - super(context.shardTarget(), msg); - } - - public SearchContextException(SearchContext context, String msg, Throwable t) { - super(context.shardTarget(), msg, t); - } - - public SearchContextException(StreamInput in) throws IOException { - super(in); - } + /** + * @return List of featureNames expected by the model, in the order they are expected + */ + List<String> getFeatureNames(); + /** + * @return The name of the model + */ + String getName(); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java new file mode 100644 index 000000000000..de040ec6f9ed --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java @@ -0,0 +1,192 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.trainedmodel.tree; + +import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +public class Tree implements TrainedModel { + + public static final String NAME = "tree"; + + public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); + public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure"); + + private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(NAME, true, Builder::new); + + static { + PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES); + PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE); + } + + public static Tree fromXContent(XContentParser parser) { + return PARSER.apply(parser, null).build(); + } + + private final List<String> featureNames; + private final List<TreeNode> nodes; + + Tree(List<String> featureNames, List<TreeNode> nodes) { + this.featureNames = Collections.unmodifiableList(Objects.requireNonNull(featureNames)); + this.nodes = Collections.unmodifiableList(Objects.requireNonNull(nodes)); + } + + @Override + public String getName() { + return NAME; + } + + @Override + public List<String> getFeatureNames() { + return featureNames; + } + + public List<TreeNode> getNodes() { + return nodes; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FEATURE_NAMES.getPreferredName(), featureNames); + builder.field(TREE_STRUCTURE.getPreferredName(), nodes); + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Tree that = (Tree) o; + return Objects.equals(featureNames, that.featureNames) + && Objects.equals(nodes, that.nodes); + } + + @Override + public int hashCode() { + return Objects.hash(featureNames, nodes); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private List<String> featureNames; + private ArrayList<TreeNode.Builder> nodes; + private int numNodes; + + public Builder() { + nodes = new ArrayList<>(); + // allocate space for the root node and set it to a leaf + nodes.add(null); + addLeaf(0, 0.0); + numNodes = 1; + } + + public Builder setFeatureNames(List<String> featureNames) { + this.featureNames = featureNames; + return this; + } + + public Builder addNode(TreeNode.Builder node) { + nodes.add(node); + return this; + } + + public Builder setNodes(List<TreeNode.Builder> nodes) { + this.nodes = new ArrayList<>(nodes); + return this; + } + + public Builder setNodes(TreeNode.Builder... nodes) { + return setNodes(Arrays.asList(nodes)); + } + + /** + * Add a decision node. Space for the child nodes is allocated + * @param nodeIndex Where to place the node. 
This is either 0 (root) or an existing child node index + * @param featureIndex The feature index the decision is made on + * @param isDefaultLeft Default left branch if the feature is missing + * @param decisionThreshold The decision threshold + * @return The created node + */ + public TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) { + int leftChild = numNodes++; + int rightChild = numNodes++; + nodes.ensureCapacity(nodeIndex + 1); + for (int i = nodes.size(); i < nodeIndex + 1; i++) { + nodes.add(null); + } + + TreeNode.Builder node = TreeNode.builder(nodeIndex) + .setDefaultLeft(isDefaultLeft) + .setLeftChild(leftChild) + .setRightChild(rightChild) + .setSplitFeature(featureIndex) + .setThreshold(decisionThreshold); + nodes.set(nodeIndex, node); + + // allocate space for the child nodes + while (nodes.size() <= rightChild) { + nodes.add(null); + } + + return node; + } + + /** + * Sets the node at {@code nodeIndex} to a leaf node. + * @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)} + * @param value The prediction value + * @return this + */ + public Builder addLeaf(int nodeIndex, double value) { + for (int i = nodes.size(); i < nodeIndex + 1; i++) { + nodes.add(null); + } + nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(value)); + return this; + } + + public Tree build() { + return new Tree(featureNames, + nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList())); + } + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java new file mode 100644 index 000000000000..020aaa097169 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java @@ -0,0 +1,280 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.trainedmodel.tree; + +import org.elasticsearch.client.ml.job.config.Operator; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class TreeNode implements ToXContentObject { + + public static final String NAME = "tree_node"; + + public static final ParseField DECISION_TYPE = new ParseField("decision_type"); + public static final ParseField THRESHOLD = new ParseField("threshold"); + public static final ParseField LEFT_CHILD = new ParseField("left_child"); + public static final ParseField RIGHT_CHILD = new ParseField("right_child"); + public static final ParseField DEFAULT_LEFT = new ParseField("default_left"); + public static final ParseField SPLIT_FEATURE = new ParseField("split_feature"); + public static final ParseField NODE_INDEX = new ParseField("node_index"); + public static final ParseField SPLIT_GAIN = new ParseField("split_gain"); + public static final ParseField LEAF_VALUE = new ParseField("leaf_value"); + + + private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>( + NAME, + true, + Builder::new); + static { + PARSER.declareDouble(Builder::setThreshold, THRESHOLD); + PARSER.declareField(Builder::setOperator, + p -> Operator.fromString(p.text()), + DECISION_TYPE, + ObjectParser.ValueType.STRING); + PARSER.declareInt(Builder::setLeftChild, LEFT_CHILD); + PARSER.declareInt(Builder::setRightChild, RIGHT_CHILD); + PARSER.declareBoolean(Builder::setDefaultLeft, DEFAULT_LEFT); + PARSER.declareInt(Builder::setSplitFeature, SPLIT_FEATURE); + PARSER.declareInt(Builder::setNodeIndex, NODE_INDEX); + PARSER.declareDouble(Builder::setSplitGain, SPLIT_GAIN); + PARSER.declareDouble(Builder::setLeafValue, LEAF_VALUE); + } + + public static Builder fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private final Operator operator; + private final Double threshold; + private final Integer splitFeature; + private final int nodeIndex; + private final Double splitGain; + private final Double leafValue; + private final Boolean defaultLeft; + private final Integer leftChild; + private final Integer rightChild; + + + TreeNode(Operator operator, + Double threshold, + Integer splitFeature, + int nodeIndex, + Double splitGain, + Double leafValue, + Boolean defaultLeft, + Integer leftChild, + Integer rightChild) { + this.operator = operator; + this.threshold = threshold; + this.splitFeature = splitFeature; + this.nodeIndex = nodeIndex; + this.splitGain = splitGain; + this.leafValue = leafValue; + this.defaultLeft = defaultLeft; + this.leftChild = leftChild; + this.rightChild = rightChild; + } + + public Operator getOperator() { + return operator; + } + + public Double getThreshold() { + return threshold; + } + + public Integer getSplitFeature() { + return splitFeature; + } + + public Integer getNodeIndex() { + return nodeIndex; + } + + public Double getSplitGain() { + return splitGain; + } + + public Double getLeafValue() { + return leafValue; + } + + public Boolean isDefaultLeft() { + return defaultLeft; + } + + public Integer getLeftChild() { + return leftChild; + } + + public Integer getRightChild() { + return rightChild; + } + + @Override + public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + addOptionalField(builder, DECISION_TYPE, operator); + addOptionalField(builder, THRESHOLD, threshold); + addOptionalField(builder, SPLIT_FEATURE, splitFeature); + addOptionalField(builder, SPLIT_GAIN, splitGain); + addOptionalField(builder, NODE_INDEX, nodeIndex); + addOptionalField(builder, LEAF_VALUE, leafValue); + addOptionalField(builder, DEFAULT_LEFT, defaultLeft); + addOptionalField(builder, LEFT_CHILD, leftChild); + addOptionalField(builder, RIGHT_CHILD, rightChild); + builder.endObject(); + return builder; + } + + private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { + if (value != null) { + builder.field(field.getPreferredName(), value); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TreeNode that = (TreeNode) o; + return Objects.equals(operator, that.operator) + && Objects.equals(threshold, that.threshold) + && Objects.equals(splitFeature, that.splitFeature) + && Objects.equals(nodeIndex, that.nodeIndex) + && Objects.equals(splitGain, that.splitGain) + && Objects.equals(leafValue, that.leafValue) + && Objects.equals(defaultLeft, that.defaultLeft) + && Objects.equals(leftChild, that.leftChild) + && Objects.equals(rightChild, that.rightChild); + } + + @Override + public int hashCode() { + return Objects.hash(operator, + threshold, + splitFeature, + splitGain, + nodeIndex, + leafValue, + defaultLeft, + leftChild, + rightChild); + } + + @Override + public String toString() { + return Strings.toString(this); + } + + public static Builder builder(int nodeIndex) { + return new Builder(nodeIndex); + } + + public static class Builder { + private Operator operator; + private Double threshold; + private Integer splitFeature; + private int nodeIndex; + private Double splitGain; + private Double leafValue; + private Boolean defaultLeft; + private Integer leftChild; + private Integer rightChild; + + public Builder(int nodeIndex) { + this.nodeIndex = nodeIndex; + } + + private Builder() { + } + + public Builder setOperator(Operator operator) { + this.operator = operator; + return this; + } + + public Builder setThreshold(Double threshold) { + this.threshold = threshold; + return this; + } + + public Builder setSplitFeature(Integer splitFeature) { + this.splitFeature = splitFeature; + return this; + } + + public Builder setNodeIndex(int nodeIndex) { + this.nodeIndex = nodeIndex; + return this; + } + + public Builder setSplitGain(Double splitGain) { + this.splitGain = splitGain; + return this; + } + + public Builder setLeafValue(Double leafValue) { + this.leafValue = leafValue; + return this; + } + + public Builder setDefaultLeft(Boolean defaultLeft) { + this.defaultLeft = defaultLeft; + return this; + } + + public Builder setLeftChild(Integer leftChild) { + this.leftChild = leftChild; + return this; + } + + public Integer getLeftChild() { + return leftChild; + } + + public Builder setRightChild(Integer rightChild) { + this.rightChild = rightChild; + return this; + } + + public Integer getRightChild() { + return rightChild; + } + + public TreeNode build() { + return new TreeNode(operator, + threshold, + splitFeature, + nodeIndex, + splitGain, + leafValue, + defaultLeft, + leftChild, + rightChild); + } + } +}
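// Editor's note (illustrative, not part of the patch): assembling a minimal
// depth-one tree with the builders above. Node 0 splits on feature "x" (a
// hypothetical name) at threshold 0.5; addJunction(0, ...) allocates child
// slots 1 and 2, which are then filled in as leaves:
//
//     Tree.Builder builder = Tree.builder().setFeatureNames(Collections.singletonList("x"));
//     builder.addJunction(0, 0, true, 0.5);
//     builder.addLeaf(1, 0.0);
//     builder.addLeaf(2, 1.0);
//     Tree tree = builder.build();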
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java index fc54f74649b0..6a8eb3230306 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java @@ -74,7 +74,7 @@ public class SnapshotLifecycleStats implements ToXContentObject { PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIMED_OUT); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIME_MILLIS); - PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> SnapshotPolicyStats.parse(p, n), POLICY_STATS); + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshotPolicyStats.PARSER, POLICY_STATS); } // Package visible for testing @@ -178,22 +178,25 @@ public class SnapshotLifecycleStats implements ToXContentObject { private final long snapshotsDeleted; private final long snapshotDeleteFailures; + public static final ParseField POLICY_ID = new ParseField("policy"); static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken"); static final ParseField SNAPSHOTS_FAILED = new ParseField("snapshots_failed"); static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted"); static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures"); - private static final ConstructingObjectParser<SnapshotPolicyStats, String> PARSER = + private static final ConstructingObjectParser<SnapshotPolicyStats, Void> PARSER = new ConstructingObjectParser<>("snapshot_policy_stats", true, - (a, id) -> { - long taken = (long) a[0]; - long failed = (long) a[1]; - long deleted = (long) a[2]; - long deleteFailed = (long) a[3]; + a -> { + String id = (String) a[0]; + long taken = (long) a[1]; + long failed = (long) a[2]; + long deleted = (long) a[3]; + long deleteFailed = (long) a[4]; return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed); }); static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_TAKEN); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_DELETED); @@ -209,7 +212,11 @@ public class SnapshotLifecycleStats implements ToXContentObject { } public static SnapshotPolicyStats parse(XContentParser parser, String policyId) { - return PARSER.apply(parser, policyId); + return PARSER.apply(parser, null); + } + + public String getPolicyId() { + return policyId; } public long getSnapshotsTaken() { 
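// Editor's note (illustrative, not part of the patch): the switch from
// declareNamedObjects to declareObjectArray means policy stats are now parsed
// as a plain array whose entries carry their own "policy" id field, roughly:
//
//     "policy_stats" : [
//       { "policy" : "daily-snapshots", "snapshots_taken" : 1, "snapshots_failed" : 0,
//         "snapshots_deleted" : 0, "snapshot_deletion_failures" : 0 }
//     ]
//
// rather than an object keyed by policy id ("daily-snapshots" is a hypothetical name).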
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteTransformRequest.java similarity index 85% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteTransformRequest.java index d28779efd1d6..7eaeb1435b6f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DeleteTransformRequest.java @@ -27,16 +27,16 @@ import java.util.Optional; /** - * Request to delete a data frame transform + * Request to delete a transform */ -public class DeleteDataFrameTransformRequest implements Validatable { +public class DeleteTransformRequest implements Validatable { public static final String FORCE = "force"; private final String id; private Boolean force; - public DeleteDataFrameTransformRequest(String id) { + public DeleteTransformRequest(String id) { this.id = id; } @@ -56,7 +56,7 @@ public class DeleteDataFrameTransformRequest implements Validatable { public Optional<ValidationException> validate() { if (id == null) { ValidationException validationException = new ValidationException(); - validationException.addValidationError("data frame transform id must not be null"); + validationException.addValidationError("transform id must not be null"); return Optional.of(validationException); } else { return Optional.empty(); @@ -77,7 +77,7 @@ public class DeleteDataFrameTransformRequest implements Validatable { if (obj == null || getClass() != obj.getClass()) { return false; } - DeleteDataFrameTransformRequest other = (DeleteDataFrameTransformRequest) obj; + DeleteTransformRequest other = (DeleteTransformRequest) obj; return Objects.equals(id, other.id) && Objects.equals(force, other.force); } }
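// Editor's note (illustrative, not part of the patch): for callers, this and the
// renames below are mechanical; only the class names change ("my-transform" is a
// hypothetical transform id):
//
//     DeleteTransformRequest request = new DeleteTransformRequest("my-transform");
//     assert request.validate().isPresent() == false; // a non-null id passes validation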
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformRequest.java similarity index 80% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformRequest.java index 3c442b650102..f0238083f6af 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformRequest.java @@ -28,22 +28,22 @@ import java.util.List; import java.util.Objects; import java.util.Optional; -public class GetDataFrameTransformRequest implements Validatable { +public class GetTransformRequest implements Validatable { public static final String ALLOW_NO_MATCH = "allow_no_match"; /** - * Helper method to create a request that will get ALL Data Frame Transforms - * @return new {@link GetDataFrameTransformRequest} object for the id "_all" + * Helper method to create a request that will get ALL Transforms + * @return new {@link GetTransformRequest} object for the id "_all" */ - public static GetDataFrameTransformRequest getAllDataFrameTransformsRequest() { - return new GetDataFrameTransformRequest("_all"); + public static GetTransformRequest getAllTransformRequest() { + return new GetTransformRequest("_all"); } private final List<String> ids; private PageParams pageParams; private Boolean allowNoMatch; - public GetDataFrameTransformRequest(String... ids) { + public GetTransformRequest(String... ids) { this.ids = Arrays.asList(ids); } @@ -71,7 +71,7 @@ public class GetDataFrameTransformRequest implements Validatable { public Optional<ValidationException> validate() { if (ids == null || ids.isEmpty()) { ValidationException validationException = new ValidationException(); - validationException.addValidationError("data frame transform id must not be null"); + validationException.addValidationError("transform id must not be null"); return Optional.of(validationException); } else { return Optional.empty(); @@ -92,7 +92,7 @@ public class GetDataFrameTransformRequest implements Validatable { if (obj == null || getClass() != obj.getClass()) { return false; } - GetDataFrameTransformRequest other = (GetDataFrameTransformRequest) obj; + GetTransformRequest other = (GetTransformRequest) obj; return Objects.equals(ids, other.ids) && Objects.equals(pageParams, other.pageParams) && Objects.equals(allowNoMatch, other.allowNoMatch); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformResponse.java similarity index 77% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformResponse.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformResponse.java index e1ca2df503d9..de2418b3852d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.client.transform; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfig; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -31,7 +31,7 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class GetDataFrameTransformResponse { +public class GetTransformResponse { public static final ParseField TRANSFORMS = new ParseField("transforms"); public static final ParseField INVALID_TRANSFORMS = new ParseField("invalid_transforms"); @@ -42,30 +42,30 @@ public class GetDataFrameTransformResponse { new ConstructingObjectParser<>("invalid_transforms", true, args -> new InvalidTransforms((List<String>) args[0])); @SuppressWarnings("unchecked") - static final ConstructingObjectParser<GetDataFrameTransformResponse, Void> PARSER = new ConstructingObjectParser<>( - "get_data_frame_transform", true, args -> new GetDataFrameTransformResponse( - (List<DataFrameTransformConfig>) args[0], (int) args[1], (InvalidTransforms) args[2])); + static final ConstructingObjectParser<GetTransformResponse, Void> PARSER = new ConstructingObjectParser<>( + "get_transform", true, args -> new GetTransformResponse( + (List<TransformConfig>) args[0], (int) args[1], (InvalidTransforms) args[2])); static { // Discard the count field which is the size of the transforms array INVALID_TRANSFORMS_PARSER.declareInt((a, b) -> {}, COUNT); INVALID_TRANSFORMS_PARSER.declareStringArray(constructorArg(), TRANSFORMS); - PARSER.declareObjectArray(constructorArg(), DataFrameTransformConfig.PARSER::apply, TRANSFORMS); + PARSER.declareObjectArray(constructorArg(), TransformConfig.PARSER::apply, TRANSFORMS); PARSER.declareInt(constructorArg(), 
COUNT); PARSER.declareObject(optionalConstructorArg(), INVALID_TRANSFORMS_PARSER::apply, INVALID_TRANSFORMS); } - public static GetDataFrameTransformResponse fromXContent(final XContentParser parser) { - return GetDataFrameTransformResponse.PARSER.apply(parser, null); + public static GetTransformResponse fromXContent(final XContentParser parser) { + return GetTransformResponse.PARSER.apply(parser, null); } - private List<DataFrameTransformConfig> transformConfigurations; + private List<TransformConfig> transformConfigurations; private int count; private InvalidTransforms invalidTransforms; - public GetDataFrameTransformResponse(List<DataFrameTransformConfig> transformConfigurations, - int count, - @Nullable InvalidTransforms invalidTransforms) { + public GetTransformResponse(List<TransformConfig> transformConfigurations, + int count, + @Nullable InvalidTransforms invalidTransforms) { this.transformConfigurations = transformConfigurations; this.count = count; this.invalidTransforms = invalidTransforms; @@ -80,7 +80,7 @@ public class GetDataFrameTransformResponse { return count; } - public List<DataFrameTransformConfig> getTransformConfigurations() { + public List<TransformConfig> getTransformConfigurations() { return transformConfigurations; } @@ -99,7 +99,7 @@ public class GetDataFrameTransformResponse { return false; } - final GetDataFrameTransformResponse that = (GetDataFrameTransformResponse) other; + final GetTransformResponse that = (GetTransformResponse) other; return Objects.equals(this.transformConfigurations, that.transformConfigurations) && Objects.equals(this.count, that.count) && Objects.equals(this.invalidTransforms, that.invalidTransforms); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsRequest.java similarity index 88% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequest.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsRequest.java index 579dd715cbc5..d226d5c67bb5 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsRequest.java @@ -26,12 +26,12 @@ import org.elasticsearch.client.core.PageParams; import java.util.Objects; import java.util.Optional; -public class GetDataFrameTransformStatsRequest implements Validatable { +public class GetTransformStatsRequest implements Validatable { private final String id; private PageParams pageParams; private Boolean allowNoMatch; - public GetDataFrameTransformStatsRequest(String id) { + public GetTransformStatsRequest(String id) { this.id = id; } @@ -59,7 +59,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable { public Optional<ValidationException> validate() { if (id == null) { ValidationException validationException = new ValidationException(); - validationException.addValidationError("data frame transform id must not be null"); + validationException.addValidationError("transform id must not be null"); return Optional.of(validationException); } else { return Optional.empty(); @@ -80,7 +80,7 @@ public class GetDataFrameTransformStatsRequest implements Validatable { if (obj == null || getClass() != obj.getClass()) { return false; } - GetDataFrameTransformStatsRequest other = (GetDataFrameTransformStatsRequest) obj; + GetTransformStatsRequest other = (GetTransformStatsRequest) obj; return Objects.equals(id, other.id) 
&& Objects.equals(pageParams, other.pageParams) && Objects.equals(allowNoMatch, other.allowNoMatch); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsResponse.java similarity index 78% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponse.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsResponse.java index a1c4fc819749..4debe2ffd75a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/GetTransformStatsResponse.java @@ -21,7 +21,7 @@ package org.elasticsearch.client.transform; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; +import org.elasticsearch.client.transform.transforms.TransformStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -34,19 +34,19 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class GetDataFrameTransformStatsResponse { +public class GetTransformStatsResponse { public static final ParseField TRANSFORMS = new ParseField("transforms"); public static final ParseField COUNT = new ParseField("count"); @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_data_frame_transform_stats_response", true, - args -> new GetDataFrameTransformStatsResponse((List) args[0], + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_transform_stats_response", true, + args -> new GetTransformStatsResponse((List) args[0], (List) args[1], (List) args[2])); static { - PARSER.declareObjectArray(constructorArg(), DataFrameTransformStats.PARSER::apply, TRANSFORMS); + PARSER.declareObjectArray(constructorArg(), TransformStats.PARSER::apply, TRANSFORMS); // Discard the count field which is the size of the transforms array PARSER.declareInt((a, b) -> {}, COUNT); PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p), @@ -55,15 +55,15 @@ public class GetDataFrameTransformStatsResponse { AcknowledgedTasksResponse.NODE_FAILURES); } - public static GetDataFrameTransformStatsResponse fromXContent(final XContentParser parser) { - return GetDataFrameTransformStatsResponse.PARSER.apply(parser, null); + public static GetTransformStatsResponse fromXContent(final XContentParser parser) { + return GetTransformStatsResponse.PARSER.apply(parser, null); } - private final List transformsStats; + private final List transformsStats; private final List taskFailures; private final List nodeFailures; - public GetDataFrameTransformStatsResponse(List transformsStats, + public GetTransformStatsResponse(List transformsStats, @Nullable List taskFailures, @Nullable List nodeFailures) { this.transformsStats = transformsStats; @@ -71,7 +71,7 @@ public class GetDataFrameTransformStatsResponse { this.nodeFailures = nodeFailures == null ? 
             Collections.emptyList() : Collections.unmodifiableList(nodeFailures);
     }
 
-    public List<DataFrameTransformStats> getTransformsStats() {
+    public List<TransformStats> getTransformsStats() {
         return transformsStats;
     }
 
@@ -98,7 +98,7 @@ public class GetDataFrameTransformStatsResponse {
             return false;
         }
 
-        final GetDataFrameTransformStatsResponse that = (GetDataFrameTransformStatsResponse) other;
+        final GetTransformStatsResponse that = (GetTransformStatsResponse) other;
         return Objects.equals(this.transformsStats, that.transformsStats)
             && Objects.equals(this.nodeFailures, that.nodeFailures)
             && Objects.equals(this.taskFailures, that.taskFailures);
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformRequest.java
similarity index 80%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformRequest.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformRequest.java
index ab06ebfa4c9e..4eba5c60aa71 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformRequest.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformRequest.java
@@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
 
 import org.elasticsearch.client.Validatable;
 import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
+import org.elasticsearch.client.transform.transforms.TransformConfig;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -30,15 +30,15 @@ import java.io.IOException;
 import java.util.Objects;
 import java.util.Optional;
 
-public class PreviewDataFrameTransformRequest implements ToXContentObject, Validatable {
+public class PreviewTransformRequest implements ToXContentObject, Validatable {
 
-    private final DataFrameTransformConfig config;
+    private final TransformConfig config;
 
-    public PreviewDataFrameTransformRequest(DataFrameTransformConfig config) {
+    public PreviewTransformRequest(TransformConfig config) {
         this.config = config;
     }
 
-    public DataFrameTransformConfig getConfig() {
+    public TransformConfig getConfig() {
         return config;
     }
 
@@ -51,11 +51,11 @@ public class PreviewDataFrameTransformRequest implements ToXContentObject, Valid
     public Optional<ValidationException> validate() {
         ValidationException validationException = new ValidationException();
         if (config == null) {
-            validationException.addValidationError("preview requires a non-null data frame config");
+            validationException.addValidationError("preview requires a non-null transform config");
             return Optional.of(validationException);
         } else {
             if (config.getSource() == null) {
-                validationException.addValidationError("data frame transform source cannot be null");
+                validationException.addValidationError("transform source cannot be null");
             }
         }
 
@@ -79,7 +79,7 @@ public class PreviewDataFrameTransformRequest implements ToXContentObject, Valid
         if (getClass() != obj.getClass()) {
             return false;
         }
-        PreviewDataFrameTransformRequest other = (PreviewDataFrameTransformRequest) obj;
+        PreviewTransformRequest other = (PreviewTransformRequest) obj;
         return Objects.equals(config, other.config);
     }
 }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java
similarity index 80%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponse.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java
index 9f7cd2b313ab..215d529f9499 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponse.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PreviewTransformResponse.java
@@ -26,23 +26,23 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 
-public class PreviewDataFrameTransformResponse {
+public class PreviewTransformResponse {
 
     private static final String PREVIEW = "preview";
     private static final String MAPPINGS = "mappings";
 
     @SuppressWarnings("unchecked")
-    public static PreviewDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
+    public static PreviewTransformResponse fromXContent(final XContentParser parser) throws IOException {
         Map<String, Object> previewMap = parser.mapOrdered();
         Object previewDocs = previewMap.get(PREVIEW);
         Object mappings = previewMap.get(MAPPINGS);
-        return new PreviewDataFrameTransformResponse((List<Map<String, Object>>) previewDocs, (Map<String, Object>) mappings);
+        return new PreviewTransformResponse((List<Map<String, Object>>) previewDocs, (Map<String, Object>) mappings);
     }
 
     private List<Map<String, Object>> docs;
     private Map<String, Object> mappings;
 
-    public PreviewDataFrameTransformResponse(List<Map<String, Object>> docs, Map<String, Object> mappings) {
+    public PreviewTransformResponse(List<Map<String, Object>> docs, Map<String, Object> mappings) {
         this.docs = docs;
         this.mappings = mappings;
     }
@@ -65,7 +65,7 @@ public class PreviewDataFrameTransformResponse {
             return false;
         }
 
-        PreviewDataFrameTransformResponse other = (PreviewDataFrameTransformResponse) obj;
+        PreviewTransformResponse other = (PreviewTransformResponse) obj;
         return Objects.equals(other.docs, docs)
             && Objects.equals(other.mappings, mappings);
     }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutTransformRequest.java
similarity index 79%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutDataFrameTransformRequest.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutTransformRequest.java
index e948ae53e0dd..caff901e5d66 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutDataFrameTransformRequest.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/PutTransformRequest.java
@@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
 
 import org.elasticsearch.client.Validatable;
 import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
+import org.elasticsearch.client.transform.transforms.TransformConfig;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -29,17 +29,17 @@ import java.io.IOException;
 import java.util.Objects;
 import java.util.Optional;
 
-public class PutDataFrameTransformRequest implements ToXContentObject, Validatable {
+public class PutTransformRequest implements ToXContentObject, Validatable {
 
     public static final String DEFER_VALIDATION = "defer_validation";
-    private final DataFrameTransformConfig config;
+    private final TransformConfig config;
     private Boolean deferValidation;
 
-    public PutDataFrameTransformRequest(DataFrameTransformConfig config) {
+    public PutTransformRequest(TransformConfig config) {
         this.config = config;
     }
 
-    public DataFrameTransformConfig getConfig() {
+    public TransformConfig getConfig() {
         return config;
     }
@@ -60,17 +60,17 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab
     public Optional<ValidationException> validate() {
         ValidationException validationException = new ValidationException();
         if (config == null) {
-            validationException.addValidationError("put requires a non-null data frame config");
+            validationException.addValidationError("put requires a non-null transform config");
             return Optional.of(validationException);
         } else {
             if (config.getId() == null) {
-                validationException.addValidationError("data frame transform id cannot be null");
+                validationException.addValidationError("transform id cannot be null");
             }
             if (config.getSource() == null) {
-                validationException.addValidationError("data frame transform source cannot be null");
+                validationException.addValidationError("transform source cannot be null");
             }
             if (config.getDestination() == null) {
-                validationException.addValidationError("data frame transform destination cannot be null");
+                validationException.addValidationError("transform destination cannot be null");
             }
         }
 
@@ -99,7 +99,7 @@ public class PutDataFrameTransformRequest implements ToXContentObject, Validatab
         if (getClass() != obj.getClass()) {
             return false;
         }
-        PutDataFrameTransformRequest other = (PutDataFrameTransformRequest) obj;
+        PutTransformRequest other = (PutTransformRequest) obj;
         return Objects.equals(config, other.config);
     }
 }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformRequest.java
similarity index 84%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformRequest.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformRequest.java
index 208bebf58ab3..cda35903af4a 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformRequest.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformRequest.java
@@ -26,16 +26,16 @@ import org.elasticsearch.common.unit.TimeValue;
 import java.util.Objects;
 import java.util.Optional;
 
-public class StartDataFrameTransformRequest implements Validatable {
+public class StartTransformRequest implements Validatable {
 
     private final String id;
     private TimeValue timeout;
 
-    public StartDataFrameTransformRequest(String id) {
+    public StartTransformRequest(String id) {
         this.id = id;
     }
 
-    public StartDataFrameTransformRequest(String id, TimeValue timeout) {
+    public StartTransformRequest(String id, TimeValue timeout) {
         this.id = id;
         this.timeout = timeout;
     }
@@ -56,7 +56,7 @@ public class StartDataFrameTransformRequest implements Validatable {
     public Optional<ValidationException> validate() {
         if (id == null) {
             ValidationException validationException = new ValidationException();
-            validationException.addValidationError("data frame transform id must not be null");
+            validationException.addValidationError("transform id must not be null");
             return Optional.of(validationException);
         } else {
             return Optional.empty();
@@ -77,7 +77,7 @@ public class StartDataFrameTransformRequest implements Validatable {
         if (obj == null || getClass() != obj.getClass()) {
             return false;
         }
-        StartDataFrameTransformRequest other = (StartDataFrameTransformRequest) obj;
+        StartTransformRequest other = (StartTransformRequest) obj;
         return Objects.equals(this.id, other.id)
             && Objects.equals(this.timeout, other.timeout);
     }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformResponse.java
similarity index 68%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformResponse.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformResponse.java
index 9a96d2de7ae7..b945b965adf9 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartDataFrameTransformResponse.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StartTransformResponse.java
@@ -28,20 +28,20 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.io.IOException;
 import java.util.List;
 
-public class StartDataFrameTransformResponse extends AcknowledgedTasksResponse {
+public class StartTransformResponse extends AcknowledgedTasksResponse {
 
     private static final String ACKNOWLEDGED = "acknowledged";
 
-    private static final ConstructingObjectParser<StartDataFrameTransformResponse, Void> PARSER =
-        AcknowledgedTasksResponse.generateParser("start_data_frame_transform_response", StartDataFrameTransformResponse::new,
+    private static final ConstructingObjectParser<StartTransformResponse, Void> PARSER =
+        AcknowledgedTasksResponse.generateParser("start_transform_response", StartTransformResponse::new,
             ACKNOWLEDGED);
 
-    public static StartDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
+    public static StartTransformResponse fromXContent(final XContentParser parser) throws IOException {
         return PARSER.parse(parser, null);
     }
 
-    public StartDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
-                                           @Nullable List<ElasticsearchException> nodeFailures) {
+    public StartTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
+                                  @Nullable List<ElasticsearchException> nodeFailures) {
         super(acknowledged, taskFailures, nodeFailures);
     }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformRequest.java
similarity index 87%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformRequest.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformRequest.java
index 3a662c2caec2..33fc356c8da3 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformRequest.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformRequest.java
@@ -26,20 +26,20 @@ import org.elasticsearch.common.unit.TimeValue;
 import java.util.Objects;
 import java.util.Optional;
 
-public class StopDataFrameTransformRequest implements Validatable {
+public class StopTransformRequest implements Validatable {
 
     private final String id;
     private Boolean waitForCompletion;
     private TimeValue timeout;
     private Boolean allowNoMatch;
 
-    public StopDataFrameTransformRequest(String id) {
+    public StopTransformRequest(String id) {
         this.id = id;
         waitForCompletion = null;
         timeout = null;
     }
 
-    public StopDataFrameTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout) {
+    public StopTransformRequest(String id, Boolean waitForCompletion, TimeValue timeout) {
         this.id = id;
         this.waitForCompletion = waitForCompletion;
         this.timeout = timeout;
@@ -77,7 +77,7 @@ public class StopDataFrameTransformRequest implements Validatable {
     public Optional<ValidationException> validate() {
         if (id == null) {
             ValidationException validationException = new ValidationException();
-            validationException.addValidationError("data frame transform id must not be null");
+            validationException.addValidationError("transform id must not be null");
             return Optional.of(validationException);
         } else {
             return Optional.empty();
@@ -98,7 +98,7 @@ public class StopDataFrameTransformRequest implements Validatable {
         if (obj == null || getClass() != obj.getClass()) {
             return false;
         }
-        StopDataFrameTransformRequest other = (StopDataFrameTransformRequest) obj;
+        StopTransformRequest other = (StopTransformRequest) obj;
         return Objects.equals(this.id, other.id)
             && Objects.equals(this.waitForCompletion, other.waitForCompletion)
             && Objects.equals(this.timeout, other.timeout)
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformResponse.java
similarity index 67%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformResponse.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformResponse.java
index 3993679fba8e..b8a60a591177 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopDataFrameTransformResponse.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/StopTransformResponse.java
@@ -28,19 +28,19 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.io.IOException;
 import java.util.List;
 
-public class StopDataFrameTransformResponse extends AcknowledgedTasksResponse {
+public class StopTransformResponse extends AcknowledgedTasksResponse {
 
     private static final String ACKNOWLEDGED = "acknowledged";
 
-    private static final ConstructingObjectParser<StopDataFrameTransformResponse, Void> PARSER = AcknowledgedTasksResponse
-        .generateParser("stop_data_frame_transform_response", StopDataFrameTransformResponse::new, ACKNOWLEDGED);
+    private static final ConstructingObjectParser<StopTransformResponse, Void> PARSER = AcknowledgedTasksResponse
+        .generateParser("stop_transform_response", StopTransformResponse::new, ACKNOWLEDGED);
 
-    public static StopDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
+    public static StopTransformResponse fromXContent(final XContentParser parser) throws IOException {
         return PARSER.parse(parser, null);
     }
 
-    public StopDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
-                                          @Nullable List<ElasticsearchException> nodeFailures) {
+    public StopTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
+                                 @Nullable List<ElasticsearchException> nodeFailures) {
         super(acknowledged, taskFailures, nodeFailures);
     }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DataFrameNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/TransformNamedXContentProvider.java
similarity index 95%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DataFrameNamedXContentProvider.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/TransformNamedXContentProvider.java
index bf4d5b4ade17..2326d6f658cf 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/DataFrameNamedXContentProvider.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/TransformNamedXContentProvider.java
@@ -28,7 +28,7 @@ import org.elasticsearch.plugins.spi.NamedXContentProvider;
 import java.util.Arrays;
 import java.util.List;
 
-public class DataFrameNamedXContentProvider implements NamedXContentProvider {
+public class TransformNamedXContentProvider implements NamedXContentProvider {
 
     @Override
     public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformRequest.java
similarity index 82%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequest.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformRequest.java
index d4fe836db0f4..e066d52fed5d 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequest.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformRequest.java
@@ -21,7 +21,7 @@ package org.elasticsearch.client.transform;
 
 import org.elasticsearch.client.Validatable;
 import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
+import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -29,18 +29,18 @@ import java.io.IOException;
 import java.util.Objects;
 import java.util.Optional;
 
-public class UpdateDataFrameTransformRequest implements ToXContentObject, Validatable {
+public class UpdateTransformRequest implements ToXContentObject, Validatable {
 
-    private final DataFrameTransformConfigUpdate update;
+    private final TransformConfigUpdate update;
     private final String id;
     private Boolean deferValidation;
 
-    public UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate update, String id) {
+    public UpdateTransformRequest(TransformConfigUpdate update, String id) {
         this.update = update;
         this.id = id;
     }
 
-    public DataFrameTransformConfigUpdate getUpdate() {
+    public TransformConfigUpdate getUpdate() {
         return update;
     }
@@ -65,10 +65,10 @@ public class UpdateDataFrameTransformRequest implements ToXContentObject, Valida
     public Optional<ValidationException> validate() {
         ValidationException validationException = new ValidationException();
         if (update == null) {
-            validationException.addValidationError("put requires a non-null data frame config update object");
+            validationException.addValidationError("put requires a non-null transform config update object");
         }
         if (id == null) {
-            validationException.addValidationError("data frame transform id cannot be null");
+            validationException.addValidationError("transform id cannot be null");
         }
         if (validationException.validationErrors().isEmpty()) {
             return Optional.empty();
@@ -95,7 +95,7 @@ public class UpdateDataFrameTransformRequest implements ToXContentObject, Valida
         if (getClass() != obj.getClass()) {
             return false;
         }
-        UpdateDataFrameTransformRequest other = (UpdateDataFrameTransformRequest) obj;
+        UpdateTransformRequest other = (UpdateTransformRequest) obj;
         return Objects.equals(update, other.update)
             && Objects.equals(id, other.id)
             && Objects.equals(deferValidation, other.deferValidation);
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformResponse.java
similarity index 69%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponse.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformResponse.java
index 2afc8f9f3f56..0f5d6f99e825 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponse.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/UpdateTransformResponse.java
@@ -19,24 +19,24 @@
 
 package org.elasticsearch.client.transform;
 
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
+import org.elasticsearch.client.transform.transforms.TransformConfig;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.util.Objects;
 
-public class UpdateDataFrameTransformResponse {
+public class UpdateTransformResponse {
 
-    public static UpdateDataFrameTransformResponse fromXContent(final XContentParser parser) {
-        return new UpdateDataFrameTransformResponse(DataFrameTransformConfig.PARSER.apply(parser, null));
+    public static UpdateTransformResponse fromXContent(final XContentParser parser) {
+        return new UpdateTransformResponse(TransformConfig.PARSER.apply(parser, null));
     }
 
-    private DataFrameTransformConfig transformConfiguration;
+    private TransformConfig transformConfiguration;
 
-    public UpdateDataFrameTransformResponse(DataFrameTransformConfig transformConfiguration) {
+    public UpdateTransformResponse(TransformConfig transformConfiguration) {
         this.transformConfiguration = transformConfiguration;
     }
 
-    public DataFrameTransformConfig getTransformConfiguration() {
+    public TransformConfig getTransformConfiguration() {
         return transformConfiguration;
     }
@@ -55,7 +55,7 @@ public class UpdateDataFrameTransformResponse {
             return false;
         }
 
-        final UpdateDataFrameTransformResponse that = (UpdateDataFrameTransformResponse) other;
+        final UpdateTransformResponse that = (UpdateTransformResponse) other;
         return Objects.equals(this.transformConfiguration, that.transformConfiguration);
     }
 }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DestConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DestConfig.java
index 9dce70efe238..52d05d5f165a 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DestConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DestConfig.java
@@ -31,14 +31,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
 /**
- * Configuration containing the destination index for the {@link DataFrameTransformConfig}
+ * Configuration containing the destination index for the {@link TransformConfig}
  */
 public class DestConfig implements ToXContentObject {
 
     public static final ParseField INDEX = new ParseField("index");
     public static final ParseField PIPELINE = new ParseField("pipeline");
 
-    public static final ConstructingObjectParser<DestConfig, Void> PARSER = new ConstructingObjectParser<>("data_frame_config_dest",
+    public static final ConstructingObjectParser<DestConfig, Void> PARSER = new ConstructingObjectParser<>("transform_config_dest",
         true,
         args -> new DestConfig((String)args[0], (String)args[1]));
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/QueryConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/QueryConfig.java
index 3f4727863a27..daee248f4696 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/QueryConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/QueryConfig.java
@@ -29,7 +29,7 @@ import java.io.IOException;
 import java.util.Objects;
 
 /**
- * Object for encapsulating the desired Query for a DataFrameTransform
+ * Object for encapsulating the desired Query for a Transform
  */
 public class QueryConfig implements ToXContentObject {
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/SourceConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/SourceConfig.java
index fa72bc32391f..157a637040c4 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/SourceConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/SourceConfig.java
@@ -35,14 +35,14 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
 
 
 /**
- * Class encapsulating all options for a {@link DataFrameTransformConfig} gathering data
+ * Class encapsulating all options for a {@link TransformConfig} gathering data
  */
 public class SourceConfig implements ToXContentObject {
 
     public static final ParseField QUERY = new ParseField("query");
     public static final ParseField INDEX = new ParseField("index");
 
-    public static final ConstructingObjectParser<SourceConfig, Void> PARSER = new ConstructingObjectParser<>("data_frame_config_source",
+    public static final ConstructingObjectParser<SourceConfig, Void> PARSER = new ConstructingObjectParser<>("transform_config_source",
         true,
         args -> {
             @SuppressWarnings("unchecked")
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStats.java
similarity index 68%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStats.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStats.java
index 1f9606fe2dc3..8d376d2e1911 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStats.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStats.java
@@ -28,7 +28,7 @@ import java.util.Objects;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
-public class DataFrameTransformCheckpointStats {
+public class TransformCheckpointStats {
 
     public static final ParseField CHECKPOINT = new ParseField("checkpoint");
     public static final ParseField POSITION = new ParseField("position");
@@ -36,40 +36,40 @@ public class DataFrameTransformCheckpointStats {
     public static final ParseField TIMESTAMP_MILLIS = new ParseField("timestamp_millis");
     public static final ParseField TIME_UPPER_BOUND_MILLIS = new ParseField("time_upper_bound_millis");
 
-    public static final DataFrameTransformCheckpointStats EMPTY = new DataFrameTransformCheckpointStats(0L, null, null, 0L, 0L);
+    public static final TransformCheckpointStats EMPTY = new TransformCheckpointStats(0L, null, null, 0L, 0L);
 
     private final long checkpoint;
-    private final DataFrameIndexerPosition position;
-    private final DataFrameTransformProgress checkpointProgress;
+    private final TransformIndexerPosition position;
+    private final TransformProgress checkpointProgress;
     private final long timestampMillis;
     private final long timeUpperBoundMillis;
 
-    public static final ConstructingObjectParser<DataFrameTransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
-            "data_frame_transform_checkpoint_stats", true, args -> {
+    public static final ConstructingObjectParser<TransformCheckpointStats, Void> LENIENT_PARSER = new ConstructingObjectParser<>(
+            "transform_checkpoint_stats", true, args -> {
                 long checkpoint = args[0] == null ? 0L : (Long) args[0];
-                DataFrameIndexerPosition position = (DataFrameIndexerPosition) args[1];
-                DataFrameTransformProgress checkpointProgress = (DataFrameTransformProgress) args[2];
+                TransformIndexerPosition position = (TransformIndexerPosition) args[1];
+                TransformProgress checkpointProgress = (TransformProgress) args[2];
                 long timestamp = args[3] == null ? 0L : (Long) args[3];
                 long timeUpperBound = args[4] == null ? 0L : (Long) args[4];
 
-                return new DataFrameTransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
+                return new TransformCheckpointStats(checkpoint, position, checkpointProgress, timestamp, timeUpperBound);
             });
 
     static {
         LENIENT_PARSER.declareLong(optionalConstructorArg(), CHECKPOINT);
-        LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameIndexerPosition.PARSER, POSITION);
-        LENIENT_PARSER.declareObject(optionalConstructorArg(), DataFrameTransformProgress.PARSER, CHECKPOINT_PROGRESS);
+        LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformIndexerPosition.PARSER, POSITION);
+        LENIENT_PARSER.declareObject(optionalConstructorArg(), TransformProgress.PARSER, CHECKPOINT_PROGRESS);
         LENIENT_PARSER.declareLong(optionalConstructorArg(), TIMESTAMP_MILLIS);
         LENIENT_PARSER.declareLong(optionalConstructorArg(), TIME_UPPER_BOUND_MILLIS);
     }
 
-    public static DataFrameTransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
+    public static TransformCheckpointStats fromXContent(XContentParser parser) throws IOException {
         return LENIENT_PARSER.parse(parser, null);
     }
 
-    public DataFrameTransformCheckpointStats(final long checkpoint, final DataFrameIndexerPosition position,
-                                             final DataFrameTransformProgress checkpointProgress, final long timestampMillis,
-                                             final long timeUpperBoundMillis) {
+    public TransformCheckpointStats(final long checkpoint, final TransformIndexerPosition position,
+                                    final TransformProgress checkpointProgress, final long timestampMillis,
+                                    final long timeUpperBoundMillis) {
         this.checkpoint = checkpoint;
         this.position = position;
         this.checkpointProgress = checkpointProgress;
@@ -81,11 +81,11 @@ public class DataFrameTransformCheckpointStats {
         return checkpoint;
     }
 
-    public DataFrameIndexerPosition getPosition() {
+    public TransformIndexerPosition getPosition() {
         return position;
     }
 
-    public DataFrameTransformProgress getCheckpointProgress() {
+    public TransformProgress getCheckpointProgress() {
         return checkpointProgress;
     }
 
@@ -112,7 +112,7 @@ public class DataFrameTransformCheckpointStats {
             return false;
         }
 
-        DataFrameTransformCheckpointStats that = (DataFrameTransformCheckpointStats) other;
+        TransformCheckpointStats that = (TransformCheckpointStats) other;
 
         return this.checkpoint == that.checkpoint
             && Objects.equals(this.position, that.position)
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfo.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfo.java
similarity index 71%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfo.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfo.java
index 79d02c523ffa..d5ba36438441 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfo.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfo.java
@@ -29,37 +29,37 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.time.Instant;
 import java.util.Objects;
 
-public class DataFrameTransformCheckpointingInfo {
+public class TransformCheckpointingInfo {
 
     public static final ParseField LAST_CHECKPOINT = new ParseField("last", "current");
     public static final ParseField NEXT_CHECKPOINT = new ParseField("next", "in_progress");
     public static final ParseField OPERATIONS_BEHIND = new ParseField("operations_behind");
     public static final ParseField CHANGES_LAST_DETECTED_AT = new ParseField("changes_last_detected_at");
 
-    private final DataFrameTransformCheckpointStats last;
-    private final DataFrameTransformCheckpointStats next;
+    private final TransformCheckpointStats last;
+    private final TransformCheckpointStats next;
     private final long operationsBehind;
     private final Instant changesLastDetectedAt;
 
-    private static final ConstructingObjectParser<DataFrameTransformCheckpointingInfo, Void> LENIENT_PARSER =
+    private static final ConstructingObjectParser<TransformCheckpointingInfo, Void> LENIENT_PARSER =
             new ConstructingObjectParser<>(
-                    "data_frame_transform_checkpointing_info",
+                    "transform_checkpointing_info",
                     true,
                     a -> {
                         long behind = a[2] == null ? 0L : (Long) a[2];
                         Instant changesLastDetectedAt = (Instant)a[3];
-                        return new DataFrameTransformCheckpointingInfo(
-                                a[0] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[0],
-                                a[1] == null ? DataFrameTransformCheckpointStats.EMPTY : (DataFrameTransformCheckpointStats) a[1],
+                        return new TransformCheckpointingInfo(
+                                a[0] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[0],
+                                a[1] == null ? TransformCheckpointStats.EMPTY : (TransformCheckpointStats) a[1],
                                 behind,
                                 changesLastDetectedAt);
                     });
 
     static {
         LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
-                (p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
+                (p, c) -> TransformCheckpointStats.fromXContent(p), LAST_CHECKPOINT);
         LENIENT_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
-                (p, c) -> DataFrameTransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
+                (p, c) -> TransformCheckpointStats.fromXContent(p), NEXT_CHECKPOINT);
         LENIENT_PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), OPERATIONS_BEHIND);
         LENIENT_PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
                 p -> TimeUtil.parseTimeFieldToInstant(p, CHANGES_LAST_DETECTED_AT.getPreferredName()),
@@ -67,21 +67,21 @@ public class DataFrameTransformCheckpointingInfo {
                 ObjectParser.ValueType.VALUE);
     }
 
-    public DataFrameTransformCheckpointingInfo(DataFrameTransformCheckpointStats last,
-                                               DataFrameTransformCheckpointStats next,
-                                               long operationsBehind,
-                                               Instant changesLastDetectedAt) {
+    public TransformCheckpointingInfo(TransformCheckpointStats last,
+                                      TransformCheckpointStats next,
+                                      long operationsBehind,
+                                      Instant changesLastDetectedAt) {
         this.last = Objects.requireNonNull(last);
         this.next = Objects.requireNonNull(next);
         this.operationsBehind = operationsBehind;
         this.changesLastDetectedAt = changesLastDetectedAt;
     }
 
-    public DataFrameTransformCheckpointStats getLast() {
+    public TransformCheckpointStats getLast() {
         return last;
     }
 
-    public DataFrameTransformCheckpointStats getNext() {
+    public TransformCheckpointStats getNext() {
         return next;
     }
 
@@ -94,7 +94,7 @@ public class DataFrameTransformCheckpointingInfo {
         return changesLastDetectedAt;
     }
 
-    public static DataFrameTransformCheckpointingInfo fromXContent(XContentParser p) {
+    public static TransformCheckpointingInfo fromXContent(XContentParser p) {
         return LENIENT_PARSER.apply(p, null);
     }
 
@@ -113,7 +113,7 @@ public class DataFrameTransformCheckpointingInfo {
             return false;
         }
 
-        DataFrameTransformCheckpointingInfo that = (DataFrameTransformCheckpointingInfo) other;
+        TransformCheckpointingInfo that = (TransformCheckpointingInfo) other;
 
         return Objects.equals(this.last, that.last) &&
             Objects.equals(this.next, that.next) &&
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
similarity index 85%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfig.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
index d3abc73e6f3a..ff740cfcf242 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
@@ -40,7 +40,7 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
-public class DataFrameTransformConfig implements ToXContentObject {
+public class TransformConfig implements ToXContentObject {
 
     public static final ParseField ID = new ParseField("id");
     public static final ParseField SOURCE = new ParseField("source");
@@ -63,8 +63,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
     private final Version transformVersion;
     private final Instant createTime;
 
-    public static final ConstructingObjectParser<DataFrameTransformConfig, Void> PARSER =
-        new ConstructingObjectParser<>("data_frame_transform", true,
+    public static final ConstructingObjectParser<TransformConfig, Void> PARSER =
+        new ConstructingObjectParser<>("transform", true,
             (args) -> {
                 String id = (String) args[0];
                 SourceConfig source = (SourceConfig) args[1];
@@ -75,7 +75,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
                 String description = (String)args[6];
                 Instant createTime = (Instant)args[7];
                 String transformVersion = (String)args[8];
-                return new DataFrameTransformConfig(id,
+                return new TransformConfig(id,
                     source,
                     dest,
                     frequency,
@@ -109,34 +109,34 @@ public class DataFrameTransformConfig implements ToXContentObject {
     }
 
-    public static DataFrameTransformConfig fromXContent(final XContentParser parser) {
+    public static TransformConfig fromXContent(final XContentParser parser) {
         return PARSER.apply(parser, null);
     }
 
     /**
-     * Helper method for previewing a data frame transform configuration
+     * Helper method for previewing a transform configuration
      *
-     * The DataFrameTransformConfig returned from this method should only be used for previewing the resulting data.
+     * The TransformConfig returned from this method should only be used for previewing the resulting data.
      *
-     * A new, valid, DataFrameTransformConfig with an appropriate destination and ID will have to be constructed to create
+     * A new, valid, TransformConfig with an appropriate destination and ID will have to be constructed to create
      * the transform.
      * @param source Source configuration for gathering the data
     * @param pivotConfig Pivot config to preview
-     * @return A DataFrameTransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
+     * @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
     */
-    public static DataFrameTransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
-        return new DataFrameTransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
+    public static TransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
+        return new TransformConfig(null, source, null, null, null, pivotConfig, null, null, null);
     }
 
-    DataFrameTransformConfig(final String id,
-                             final SourceConfig source,
-                             final DestConfig dest,
-                             final TimeValue frequency,
-                             final SyncConfig syncConfig,
-                             final PivotConfig pivotConfig,
-                             final String description,
-                             final Instant createTime,
-                             final String version) {
+    TransformConfig(final String id,
+                    final SourceConfig source,
+                    final DestConfig dest,
+                    final TimeValue frequency,
+                    final SyncConfig syncConfig,
+                    final PivotConfig pivotConfig,
+                    final String description,
+                    final Instant createTime,
+                    final String version) {
         this.id = id;
         this.source = source;
         this.dest = dest;
@@ -231,7 +231,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
             return false;
         }
 
-        final DataFrameTransformConfig that = (DataFrameTransformConfig) other;
+        final TransformConfig that = (TransformConfig) other;
 
         return Objects.equals(this.id, that.id)
             && Objects.equals(this.source, that.source)
@@ -303,8 +303,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
             return this;
         }
 
-        public DataFrameTransformConfig build() {
-            return new DataFrameTransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
+        public TransformConfig build() {
+            return new TransformConfig(id, source, dest, frequency, syncConfig, pivotConfig, description, null, null);
         }
     }
 }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
similarity index 72%
rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdate.java
rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
index 945e8b821169..241c578dbad4 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdate.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
@@ -34,30 +34,30 @@ import java.util.Objects;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
 /**
- * This class holds the mutable configuration items for a data frame transform
+ * This class holds the mutable configuration items for a transform
  */
-public class DataFrameTransformConfigUpdate implements ToXContentObject {
+public class TransformConfigUpdate implements ToXContentObject {
 
-    public static final String NAME = "data_frame_transform_config_update";
-    private static final ConstructingObjectParser<DataFrameTransformConfigUpdate, Void> PARSER = new ConstructingObjectParser<>(NAME,
+    public static final String NAME = "transform_config_update";
+    private static final ConstructingObjectParser<TransformConfigUpdate, Void> PARSER = new ConstructingObjectParser<>(NAME,
         false,
         (args) -> {
             SourceConfig source = (SourceConfig) args[0];
             DestConfig dest = (DestConfig) args[1];
             TimeValue frequency = args[2] == null ?
null : - TimeValue.parseTimeValue((String) args[2], DataFrameTransformConfig.FREQUENCY.getPreferredName()); + TimeValue.parseTimeValue((String) args[2], TransformConfig.FREQUENCY.getPreferredName()); SyncConfig syncConfig = (SyncConfig) args[3]; String description = (String) args[4]; - return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description); + return new TransformConfigUpdate(source, dest, frequency, syncConfig, description); }); static { - PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), DataFrameTransformConfig.SOURCE); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DataFrameTransformConfig.DEST); - PARSER.declareString(optionalConstructorArg(), DataFrameTransformConfig.FREQUENCY); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), DataFrameTransformConfig.SYNC); - PARSER.declareString(optionalConstructorArg(), DataFrameTransformConfig.DESCRIPTION); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), TransformConfig.SOURCE); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), TransformConfig.DEST); + PARSER.declareString(optionalConstructorArg(), TransformConfig.FREQUENCY); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), TransformConfig.SYNC); + PARSER.declareString(optionalConstructorArg(), TransformConfig.DESCRIPTION); } private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException { @@ -74,11 +74,11 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject { private final SyncConfig syncConfig; private final String description; - public DataFrameTransformConfigUpdate(final SourceConfig source, - final DestConfig dest, - final TimeValue frequency, - final SyncConfig syncConfig, - final String description){ + public TransformConfigUpdate(final SourceConfig source, + final DestConfig dest, + final TimeValue frequency, + final SyncConfig syncConfig, + final String description) { this.source = source; this.dest = dest; this.frequency = frequency; @@ -111,21 +111,21 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject { public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); if (source != null) { - builder.field(DataFrameTransformConfig.SOURCE.getPreferredName(), source); + builder.field(TransformConfig.SOURCE.getPreferredName(), source); } if (dest != null) { - builder.field(DataFrameTransformConfig.DEST.getPreferredName(), dest); + builder.field(TransformConfig.DEST.getPreferredName(), dest); } if (frequency != null) { - builder.field(DataFrameTransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); + builder.field(TransformConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); } if (syncConfig != null) { - builder.startObject(DataFrameTransformConfig.SYNC.getPreferredName()); + builder.startObject(TransformConfig.SYNC.getPreferredName()); builder.field(syncConfig.getName(), syncConfig); builder.endObject(); } if (description != null) { - builder.field(DataFrameTransformConfig.DESCRIPTION.getPreferredName(), description); + builder.field(TransformConfig.DESCRIPTION.getPreferredName(), description); } builder.endObject(); return builder; @@ -141,7 +141,7 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject { return false; } - final 
DataFrameTransformConfigUpdate that = (DataFrameTransformConfigUpdate) other; + final TransformConfigUpdate that = (TransformConfigUpdate) other; return Objects.equals(this.source, that.source) && Objects.equals(this.dest, that.dest) @@ -164,7 +164,7 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject { return new Builder(); } - public static DataFrameTransformConfigUpdate fromXContent(final XContentParser parser) { + public static TransformConfigUpdate fromXContent(final XContentParser parser) { return PARSER.apply(parser, null); } @@ -201,8 +201,8 @@ public class DataFrameTransformConfigUpdate implements ToXContentObject { return this; } - public DataFrameTransformConfigUpdate build() { - return new DataFrameTransformConfigUpdate(source, dest, frequency, syncConfig, description); + public TransformConfigUpdate build() { + return new TransformConfigUpdate(source, dest, frequency, syncConfig, description); } } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPosition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerPosition.java similarity index 89% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPosition.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerPosition.java index 6141f77c3b0d..c312666be890 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPosition.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerPosition.java @@ -37,7 +37,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona * indexer_position: the position of the indexer querying the source * bucket_position: the position used for identifying changes */ -public class DataFrameIndexerPosition { +public class TransformIndexerPosition { public static final ParseField INDEXER_POSITION = new ParseField("indexer_position"); public static final ParseField BUCKET_POSITION = new ParseField("bucket_position"); @@ -45,17 +45,17 @@ public class DataFrameIndexerPosition { private final Map bucketPosition; @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_frame_indexer_position", + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "transform_indexer_position", true, - args -> new DataFrameIndexerPosition((Map) args[0],(Map) args[1])); + args -> new TransformIndexerPosition((Map) args[0],(Map) args[1])); static { PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, INDEXER_POSITION, ValueType.OBJECT); PARSER.declareField(optionalConstructorArg(), XContentParser::mapOrdered, BUCKET_POSITION, ValueType.OBJECT); } - public DataFrameIndexerPosition(Map indexerPosition, Map bucketPosition) { + public TransformIndexerPosition(Map indexerPosition, Map bucketPosition) { this.indexerPosition = indexerPosition == null ? null : Collections.unmodifiableMap(indexerPosition); this.bucketPosition = bucketPosition == null ? 
null : Collections.unmodifiableMap(bucketPosition); } @@ -78,7 +78,7 @@ public class DataFrameIndexerPosition { return false; } - DataFrameIndexerPosition that = (DataFrameIndexerPosition) other; + TransformIndexerPosition that = (TransformIndexerPosition) other; return Objects.equals(this.indexerPosition, that.indexerPosition) && Objects.equals(this.bucketPosition, that.bucketPosition); @@ -89,7 +89,7 @@ public class DataFrameIndexerPosition { return Objects.hash(indexerPosition, bucketPosition); } - public static DataFrameIndexerPosition fromXContent(XContentParser parser) { + public static TransformIndexerPosition fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); } catch (IOException e) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java similarity index 84% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStats.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java index 23a25c511b20..2a04c6ea45eb 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformIndexerStats.java @@ -30,16 +30,16 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class DataFrameIndexerTransformStats extends IndexerJobStats { +public class TransformIndexerStats extends IndexerJobStats { static ParseField EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS = new ParseField("exponential_avg_checkpoint_duration_ms"); static ParseField EXPONENTIAL_AVG_DOCUMENTS_INDEXED = new ParseField("exponential_avg_documents_indexed"); static ParseField EXPONENTIAL_AVG_DOCUMENTS_PROCESSED = new ParseField("exponential_avg_documents_processed"); - public static final ConstructingObjectParser LENIENT_PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser LENIENT_PARSER = new ConstructingObjectParser<>( NAME, true, - args -> new DataFrameIndexerTransformStats((long) args[0], (long) args[1], (long) args[2], + args -> new TransformIndexerStats((long) args[0], (long) args[1], (long) args[2], (long) args[3], (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9], (Double) args[10], (Double) args[11], (Double) args[12])); @@ -59,7 +59,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats { LENIENT_PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_DOCUMENTS_PROCESSED); } - public static DataFrameIndexerTransformStats fromXContent(XContentParser parser) throws IOException { + public static TransformIndexerStats fromXContent(XContentParser parser) throws IOException { return LENIENT_PARSER.parse(parser, null); } @@ -67,11 +67,11 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats { private final double expAvgDocumentsIndexed; private final double expAvgDocumentsProcessed; - public DataFrameIndexerTransformStats(long numPages, long numInputDocuments, long numOuputDocuments, - long numInvocations, long indexTime, long searchTime, - long 
indexTotal, long searchTotal, long indexFailures, long searchFailures, - Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed, - Double expAvgDocumentsProcessed) { + public TransformIndexerStats(long numPages, long numInputDocuments, long numOuputDocuments, + long numInvocations, long indexTime, long searchTime, + long indexTotal, long searchTotal, long indexFailures, long searchFailures, + Double expAvgCheckpointDurationMs, Double expAvgDocumentsIndexed, + Double expAvgDocumentsProcessed) { super(numPages, numInputDocuments, numOuputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal, indexFailures, searchFailures); this.expAvgCheckpointDurationMs = expAvgCheckpointDurationMs == null ? 0.0 : expAvgCheckpointDurationMs; @@ -101,7 +101,7 @@ public class DataFrameIndexerTransformStats extends IndexerJobStats { return false; } - DataFrameIndexerTransformStats that = (DataFrameIndexerTransformStats) other; + TransformIndexerStats that = (TransformIndexerStats) other; return Objects.equals(this.numPages, that.numPages) && Objects.equals(this.numInputDocuments, that.numInputDocuments) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgress.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformProgress.java similarity index 82% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgress.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformProgress.java index 73eacac85132..e3d226d87428 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgress.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformProgress.java @@ -28,7 +28,7 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class DataFrameTransformProgress { +public class TransformProgress { public static final ParseField TOTAL_DOCS = new ParseField("total_docs"); public static final ParseField DOCS_REMAINING = new ParseField("docs_remaining"); @@ -36,10 +36,10 @@ public class DataFrameTransformProgress { public static final ParseField DOCS_PROCESSED = new ParseField("docs_processed"); public static final ParseField DOCS_INDEXED = new ParseField("docs_indexed"); - public static final ConstructingObjectParser<DataFrameTransformProgress, Void> PARSER = new ConstructingObjectParser<>( - "data_frame_transform_progress", + public static final ConstructingObjectParser<TransformProgress, Void> PARSER = new ConstructingObjectParser<>( + "transform_progress", true, - a -> new DataFrameTransformProgress((Long) a[0], (Long)a[1], (Double)a[2], (Long)a[3], (Long)a[4])); + a -> new TransformProgress((Long) a[0], (Long)a[1], (Double)a[2], (Long)a[3], (Long)a[4])); static { PARSER.declareLong(optionalConstructorArg(), TOTAL_DOCS); @@ -49,7 +49,7 @@ public class DataFrameTransformProgress { PARSER.declareLong(optionalConstructorArg(), DOCS_PROCESSED); PARSER.declareLong(optionalConstructorArg(), DOCS_INDEXED); } - public static DataFrameTransformProgress fromXContent(XContentParser parser) { + public static TransformProgress fromXContent(XContentParser parser) { return PARSER.apply(parser, null); } @@ -59,11 +59,11 @@ public class DataFrameTransformProgress { private final long documentsProcessed; private final long documentsIndexed; - public DataFrameTransformProgress(Long totalDocs, - Long remainingDocs, - Double percentComplete,
- Long documentsProcessed, - Long documentsIndexed) { + public TransformProgress(Long totalDocs, + Long remainingDocs, + Double percentComplete, + Long documentsProcessed, + Long documentsIndexed) { this.totalDocs = totalDocs; this.remainingDocs = remainingDocs == null ? totalDocs : remainingDocs; this.percentComplete = percentComplete; @@ -104,7 +104,7 @@ public class DataFrameTransformProgress { return false; } - DataFrameTransformProgress that = (DataFrameTransformProgress) other; + TransformProgress that = (TransformProgress) other; return Objects.equals(this.remainingDocs, that.remainingDocs) && Objects.equals(this.totalDocs, that.totalDocs) && Objects.equals(this.percentComplete, that.percentComplete) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformStats.java similarity index 76% rename from client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStats.java rename to client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformStats.java index ccf2a18fb348..012b6751e593 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformStats.java @@ -31,7 +31,7 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class DataFrameTransformStats { +public class TransformStats { public static final ParseField ID = new ParseField("id"); public static final ParseField STATE_FIELD = new ParseField("state"); @@ -40,10 +40,10 @@ public class DataFrameTransformStats { public static final ParseField STATS_FIELD = new ParseField("stats"); public static final ParseField CHECKPOINTING_INFO_FIELD = new ParseField("checkpointing"); - public static final ConstructingObjectParser<DataFrameTransformStats, Void> PARSER = new ConstructingObjectParser<>( + public static final ConstructingObjectParser<TransformStats, Void> PARSER = new ConstructingObjectParser<>( "data_frame_transform_state_and_stats_info", true, - a -> new DataFrameTransformStats((String) a[0], (State) a[1], (String) a[2], - (NodeAttributes) a[3], (DataFrameIndexerTransformStats) a[4], (DataFrameTransformCheckpointingInfo) a[5])); + a -> new TransformStats((String) a[0], (State) a[1], (String) a[2], + (NodeAttributes) a[3], (TransformIndexerStats) a[4], (TransformCheckpointingInfo) a[5])); static { PARSER.declareString(constructorArg(), ID); @@ -51,12 +51,12 @@ public class DataFrameTransformStats { ObjectParser.ValueType.STRING); PARSER.declareString(optionalConstructorArg(), REASON_FIELD); PARSER.declareField(optionalConstructorArg(), NodeAttributes.PARSER::apply, NODE_FIELD, ObjectParser.ValueType.OBJECT); - PARSER.declareObject(constructorArg(), (p, c) -> DataFrameIndexerTransformStats.fromXContent(p), STATS_FIELD); + PARSER.declareObject(constructorArg(), (p, c) -> TransformIndexerStats.fromXContent(p), STATS_FIELD); PARSER.declareObject(optionalConstructorArg(), - (p, c) -> DataFrameTransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD); + (p, c) -> TransformCheckpointingInfo.fromXContent(p), CHECKPOINTING_INFO_FIELD); } - public static DataFrameTransformStats fromXContent(XContentParser parser) throws
IOException { + public static TransformStats fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } @@ -64,11 +64,11 @@ public class DataFrameTransformStats { private final String reason; private final State state; private final NodeAttributes node; - private final DataFrameIndexerTransformStats indexerStats; - private final DataFrameTransformCheckpointingInfo checkpointingInfo; + private final TransformIndexerStats indexerStats; + private final TransformCheckpointingInfo checkpointingInfo; - public DataFrameTransformStats(String id, State state, String reason, NodeAttributes node, DataFrameIndexerTransformStats stats, - DataFrameTransformCheckpointingInfo checkpointingInfo) { + public TransformStats(String id, State state, String reason, NodeAttributes node, TransformIndexerStats stats, + TransformCheckpointingInfo checkpointingInfo) { this.id = id; this.state = state; this.reason = reason; @@ -93,11 +93,11 @@ public class DataFrameTransformStats { return node; } - public DataFrameIndexerTransformStats getIndexerStats() { + public TransformIndexerStats getIndexerStats() { return indexerStats; } - public DataFrameTransformCheckpointingInfo getCheckpointingInfo() { + public TransformCheckpointingInfo getCheckpointingInfo() { return checkpointingInfo; } @@ -116,7 +116,7 @@ public class DataFrameTransformStats { return false; } - DataFrameTransformStats that = (DataFrameTransformStats) other; + TransformStats that = (TransformStats) other; return Objects.equals(this.id, that.id) && Objects.equals(this.state, that.state) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/pivot/DateHistogramGroupSource.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/pivot/DateHistogramGroupSource.java index 6c775142539f..2b653f3fbf14 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/pivot/DateHistogramGroupSource.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/pivot/DateHistogramGroupSource.java @@ -72,7 +72,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo * fixed_interval fixed intervals like 1h, 1m, 1d * calendar_interval calendar aware intervals like 1M, 1Y, ... 
* - * Note: data frames do not support the deprecated interval option + * Note: transform does not support the deprecated interval option */ public interface Interval extends ToXContentFragment { String getName(); diff --git a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider index 9153347d17c5..145d06bd46b7 100644 --- a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider +++ b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider @@ -1,4 +1,5 @@ org.elasticsearch.client.ilm.IndexLifecycleNamedXContentProvider org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider -org.elasticsearch.client.transform.DataFrameNamedXContentProvider +org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider +org.elasticsearch.client.transform.TransformNamedXContentProvider diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java index d30dbfc19cfa..9a5eff72c54f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.search.SearchHit; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; import java.io.IOException; import java.util.Arrays; @@ -56,9 +55,7 @@ import java.util.stream.IntStream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.fieldFromSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasIndex; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasProperty; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasType; import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.either; @@ -360,7 +357,6 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { Iterable hits = searchAll(new SearchRequest("test").routing("routing")); assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); - assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(globalType)))); assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); } @@ -391,7 +387,6 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { Iterable hits = searchAll(new SearchRequest("test").routing("routing")); assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); - assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(localType)))); assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); } } @@ -422,7 +417,6 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase { Iterable hits = searchAll(new SearchRequest("test").routing("routing")); assertThat(hits, 
everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ")))); - assertThat(hits, everyItem(Matchers.allOf(hasIndex("test"), hasType(customType)))); assertThat(hits, containsInAnyOrder(expectedIds(numDocs))); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java index 4c8e05aa28ea..b8f557d20616 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkRequestWithGlobalParametersIT.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.search.SearchHit; import java.io.IOException; @@ -33,7 +32,6 @@ import java.util.function.Function; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasIndex; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasProperty; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasType; import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyIterable; @@ -138,36 +136,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest .and(hasIndex("global_index")))); } - public void testGlobalType() throws IOException { - BulkRequest request = new BulkRequest(null, "global_type"); - request.add(new IndexRequest("index").id("1") - .source(XContentType.JSON, "field", "bulk1")); - request.add(new IndexRequest("index").id("2") - .source(XContentType.JSON, "field", "bulk2")); - - bulkWithTypes(request); - - Iterable hits = searchAll("index"); - assertThat(hits, everyItem(hasType("global_type"))); - } - - public void testTypeGlobalAndPerRequest() throws IOException { - BulkRequest request = new BulkRequest(null, "global_type"); - request.add(new IndexRequest("index1", "local_type", "1") - .source(XContentType.JSON, "field", "bulk1")); - request.add(new IndexRequest("index2").id("2") // will take global type - .source(XContentType.JSON, "field", "bulk2")); - - bulkWithTypes(request); - - Iterable hits = searchAll("index1", "index2"); - assertThat(hits, containsInAnyOrder( - both(hasId("1")) - .and(hasType("local_type")), - both(hasId("2")) - .and(hasType("global_type")))); - } - public void testGlobalRouting() throws IOException { createIndexWithMultipleShards("index"); BulkRequest request = new BulkRequest((String) null); @@ -177,7 +145,7 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest .source(XContentType.JSON, "field", "bulk1")); request.routing("1"); bulk(request); - + Iterable emptyHits = searchAll(new SearchRequest("index").routing("xxx")); assertThat(emptyHits, is(emptyIterable())); @@ -199,7 +167,7 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest Iterable hits = searchAll(new SearchRequest("index").routing("globalRouting", "localRouting")); assertThat(hits, containsInAnyOrder(hasId("1"), hasId("2"))); } - + public void testGlobalIndexNoTypes() throws IOException { BulkRequest request = new 
BulkRequest("global_index"); request.add(new IndexRequest().id("1") @@ -211,20 +179,13 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest Iterable hits = searchAll("global_index"); assertThat(hits, everyItem(hasIndex("global_index"))); - } - - private BulkResponse bulkWithTypes(BulkRequest request) throws IOException { - BulkResponse bulkResponse = execute(request, highLevelClient()::bulk, highLevelClient()::bulkAsync, - expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)); - assertFalse(bulkResponse.hasFailures()); - return bulkResponse; } - + private BulkResponse bulk(BulkRequest request) throws IOException { BulkResponse bulkResponse = execute(request, highLevelClient()::bulk, highLevelClient()::bulkAsync, RequestOptions.DEFAULT); assertFalse(bulkResponse.hasFailures()); return bulkResponse; - } + } @SuppressWarnings("unchecked") private static Function fieldFromSource(String fieldName) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java index 4c4b1d8f709b..f1c181b61796 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameRequestConvertersTests.java @@ -24,19 +24,19 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.transform.DataFrameNamedXContentProvider; -import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdateTests; +import org.elasticsearch.client.transform.TransformNamedXContentProvider; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.PreviewTransformRequest; +import org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformRequest; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigTests; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdateTests; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -49,7 +49,7 @@ import java.io.IOException; import java.util.Collections; import java.util.List; -import static org.elasticsearch.client.transform.GetDataFrameTransformRequest.ALLOW_NO_MATCH; +import static org.elasticsearch.client.transform.GetTransformRequest.ALLOW_NO_MATCH; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; @@ -62,51 +62,51 @@ public class DataFrameRequestConvertersTests extends ESTestCase { protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); List<NamedXContentRegistry.Entry> namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } public void testPutDataFrameTransform() throws IOException { - PutDataFrameTransformRequest putRequest = new PutDataFrameTransformRequest( - DataFrameTransformConfigTests.randomDataFrameTransformConfig()); - Request request = DataFrameRequestConverters.putDataFrameTransform(putRequest); + PutTransformRequest putRequest = new PutTransformRequest( + TransformConfigTests.randomTransformConfig()); + Request request = TransformRequestConverters.putTransform(putRequest); assertThat(request.getParameters(), not(hasKey("defer_validation"))); assertEquals(HttpPut.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + putRequest.getConfig().getId())); try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { - DataFrameTransformConfig parsedConfig = DataFrameTransformConfig.PARSER.apply(parser, null); + TransformConfig parsedConfig = TransformConfig.PARSER.apply(parser, null); assertThat(parsedConfig, equalTo(putRequest.getConfig())); } putRequest.setDeferValidation(true); - request = DataFrameRequestConverters.putDataFrameTransform(putRequest); + request = TransformRequestConverters.putTransform(putRequest); assertThat(request.getParameters(), hasEntry("defer_validation", Boolean.toString(putRequest.getDeferValidation()))); } public void testUpdateDataFrameTransform() throws IOException { String transformId = randomAlphaOfLength(10); - UpdateDataFrameTransformRequest updateDataFrameTransformRequest = new UpdateDataFrameTransformRequest( - DataFrameTransformConfigUpdateTests.randomDataFrameTransformConfigUpdate(), + UpdateTransformRequest updateDataFrameTransformRequest = new UpdateTransformRequest( + TransformConfigUpdateTests.randomTransformConfigUpdate(), transformId); - Request request = DataFrameRequestConverters.updateDataFrameTransform(updateDataFrameTransformRequest); + Request request = TransformRequestConverters.updateTransform(updateDataFrameTransformRequest); assertThat(request.getParameters(), not(hasKey("defer_validation"))); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + transformId + "/_update")); try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { - DataFrameTransformConfigUpdate parsedConfig = DataFrameTransformConfigUpdate.fromXContent(parser); + TransformConfigUpdate parsedConfig =
TransformConfigUpdate.fromXContent(parser); assertThat(parsedConfig, equalTo(updateDataFrameTransformRequest.getUpdate())); } updateDataFrameTransformRequest.setDeferValidation(true); - request = DataFrameRequestConverters.updateDataFrameTransform(updateDataFrameTransformRequest); + request = TransformRequestConverters.updateTransform(updateDataFrameTransformRequest); assertThat(request.getParameters(), hasEntry("defer_validation", Boolean.toString(updateDataFrameTransformRequest.getDeferValidation()))); } public void testDeleteDataFrameTransform() { - DeleteDataFrameTransformRequest deleteRequest = new DeleteDataFrameTransformRequest("foo"); - Request request = DataFrameRequestConverters.deleteDataFrameTransform(deleteRequest); + DeleteTransformRequest deleteRequest = new DeleteTransformRequest("foo"); + Request request = TransformRequestConverters.deleteTransform(deleteRequest); assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo")); @@ -114,7 +114,7 @@ public class DataFrameRequestConvertersTests extends ESTestCase { assertThat(request.getParameters(), not(hasKey("force"))); deleteRequest.setForce(true); - request = DataFrameRequestConverters.deleteDataFrameTransform(deleteRequest); + request = TransformRequestConverters.deleteTransform(deleteRequest); assertThat(request.getParameters(), hasEntry("force", "true")); } @@ -125,9 +125,9 @@ public class DataFrameRequestConvertersTests extends ESTestCase { if (randomBoolean()) { timeValue = TimeValue.parseTimeValue(randomTimeValue(), "timeout"); } - StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id, timeValue); + StartTransformRequest startRequest = new StartTransformRequest(id, timeValue); - Request request = DataFrameRequestConverters.startDataFrameTransform(startRequest); + Request request = TransformRequestConverters.startTransform(startRequest); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + startRequest.getId() + "/_start")); @@ -149,9 +149,9 @@ public class DataFrameRequestConvertersTests extends ESTestCase { if (randomBoolean()) { timeValue = TimeValue.parseTimeValue(randomTimeValue(), "timeout"); } - StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, waitForCompletion, timeValue); + StopTransformRequest stopRequest = new StopTransformRequest(id, waitForCompletion, timeValue); - Request request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest); + Request request = TransformRequestConverters.stopTransform(stopRequest); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/" + stopRequest.getId() + "/_stop")); @@ -171,27 +171,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase { assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH)); stopRequest.setAllowNoMatch(randomBoolean()); - request = DataFrameRequestConverters.stopDataFrameTransform(stopRequest); + request = TransformRequestConverters.stopTransform(stopRequest); assertEquals(stopRequest.getAllowNoMatch(), Boolean.parseBoolean(request.getParameters().get(ALLOW_NO_MATCH))); } public void testPreviewDataFrameTransform() throws IOException { - PreviewDataFrameTransformRequest previewRequest = new PreviewDataFrameTransformRequest( - DataFrameTransformConfigTests.randomDataFrameTransformConfig()); - Request request = 
DataFrameRequestConverters.previewDataFrameTransform(previewRequest); + PreviewTransformRequest previewRequest = new PreviewTransformRequest( + TransformConfigTests.randomTransformConfig()); + Request request = TransformRequestConverters.previewTransform(previewRequest); assertEquals(HttpPost.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/_preview")); try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { - DataFrameTransformConfig parsedConfig = DataFrameTransformConfig.PARSER.apply(parser, null); + TransformConfig parsedConfig = TransformConfig.PARSER.apply(parser, null); assertThat(parsedConfig, equalTo(previewRequest.getConfig())); } } public void testGetDataFrameTransformStats() { - GetDataFrameTransformStatsRequest getStatsRequest = new GetDataFrameTransformStatsRequest("foo"); - Request request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + GetTransformStatsRequest getStatsRequest = new GetTransformStatsRequest("foo"); + Request request = TransformRequestConverters.getTransformStats(getStatsRequest); assertEquals(HttpGet.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo/_stats")); @@ -201,27 +201,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase { assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH)); getStatsRequest.setPageParams(new PageParams(0, null)); - request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + request = TransformRequestConverters.getTransformStats(getStatsRequest); assertThat(request.getParameters(), hasEntry("from", "0")); assertEquals(null, request.getParameters().get("size")); getStatsRequest.setPageParams(new PageParams(null, 50)); - request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + request = TransformRequestConverters.getTransformStats(getStatsRequest); assertEquals(null, request.getParameters().get("from")); assertThat(request.getParameters(), hasEntry("size", "50")); getStatsRequest.setPageParams(new PageParams(0, 10)); - request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + request = TransformRequestConverters.getTransformStats(getStatsRequest); assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10"))); getStatsRequest.setAllowNoMatch(false); - request = DataFrameRequestConverters.getDataFrameTransformStats(getStatsRequest); + request = TransformRequestConverters.getTransformStats(getStatsRequest); assertThat(request.getParameters(), hasEntry("allow_no_match", "false")); } public void testGetDataFrameTransform() { - GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("bar"); - Request request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + GetTransformRequest getRequest = new GetTransformRequest("bar"); + Request request = TransformRequestConverters.getTransform(getRequest); assertEquals(HttpGet.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/bar")); @@ -231,27 +231,27 @@ public class DataFrameRequestConvertersTests extends ESTestCase { assertFalse(request.getParameters().containsKey(ALLOW_NO_MATCH)); getRequest.setPageParams(new PageParams(0, null)); - request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + request = TransformRequestConverters.getTransform(getRequest); assertThat(request.getParameters(), 
hasEntry("from", "0")); assertEquals(null, request.getParameters().get("size")); getRequest.setPageParams(new PageParams(null, 50)); - request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + request = TransformRequestConverters.getTransform(getRequest); assertEquals(null, request.getParameters().get("from")); assertThat(request.getParameters(), hasEntry("size", "50")); getRequest.setPageParams(new PageParams(0, 10)); - request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + request = TransformRequestConverters.getTransform(getRequest); assertThat(request.getParameters(), allOf(hasEntry("from", "0"), hasEntry("size", "10"))); getRequest.setAllowNoMatch(false); - request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + request = TransformRequestConverters.getTransform(getRequest); assertThat(request.getParameters(), hasEntry("allow_no_match", "false")); } public void testGetDataFrameTransform_givenMulitpleIds() { - GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("foo", "bar", "baz"); - Request request = DataFrameRequestConverters.getDataFrameTransform(getRequest); + GetTransformRequest getRequest = new GetTransformRequest("foo", "bar", "baz"); + Request request = TransformRequestConverters.getTransform(getRequest); assertEquals(HttpGet.METHOD_NAME, request.getMethod()); assertThat(request.getEndpoint(), equalTo("/_data_frame/transforms/foo,bar,baz")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/GrapRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphRequestConvertersTests.java similarity index 93% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/GrapRequestConvertersTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/GraphRequestConvertersTests.java index 7c653bdee384..c45552f0debf 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/GrapRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/GraphRequestConvertersTests.java @@ -33,7 +33,7 @@ import java.util.Map; import static org.hamcrest.Matchers.is; -public class GrapRequestConvertersTests extends ESTestCase { +public class GraphRequestConvertersTests extends ESTestCase { public void testGraphExplore() throws Exception { Map expectedParams = new HashMap<>(); @@ -41,7 +41,6 @@ public class GrapRequestConvertersTests extends ESTestCase { GraphExploreRequest graphExploreRequest = new GraphExploreRequest(); graphExploreRequest.sampleDiversityField("diversity"); graphExploreRequest.indices("index1", "index2"); - graphExploreRequest.types("type1", "type2"); int timeout = randomIntBetween(10000, 20000); graphExploreRequest.timeout(TimeValue.timeValueMillis(timeout)); graphExploreRequest.useSignificance(randomBoolean()); @@ -58,7 +57,7 @@ public class GrapRequestConvertersTests extends ESTestCase { } Request request = GraphRequestConverters.explore(graphExploreRequest); assertEquals(HttpGet.METHOD_NAME, request.getMethod()); - assertEquals("/index1,index2/type1,type2/_graph/explore", request.getEndpoint()); + assertEquals("/index1,index2/_graph/explore", request.getEndpoint()); assertEquals(expectedParams, request.getParameters()); assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); RequestConvertersTests.assertToXContentBody(graphExploreRequest, request.getEntity()); diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index c880c859dccd..43e4b26ac3f1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -83,7 +83,6 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -100,12 +99,9 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.admin.indices.RestCreateIndexAction; -import org.elasticsearch.rest.action.admin.indices.RestGetFieldMappingAction; import org.elasticsearch.rest.action.admin.indices.RestGetIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.RestGetIndicesAction; -import org.elasticsearch.rest.action.admin.indices.RestGetMappingAction; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; -import org.elasticsearch.rest.action.admin.indices.RestPutMappingAction; import org.elasticsearch.rest.action.admin.indices.RestRolloverIndexAction; import java.io.IOException; @@ -541,31 +537,6 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { getIndexResponse)); } - public void testPutMappingWithTypes() throws IOException { - String indexName = "mapping_index"; - createIndex(indexName, Settings.EMPTY); - - org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest putMappingRequest = - new org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest(indexName); - putMappingRequest.type("some_type"); - - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", "text"); - mappingBuilder.endObject().endObject().endObject(); - putMappingRequest.source(mappingBuilder); - - AcknowledgedResponse putMappingResponse = execute(putMappingRequest, - highLevelClient().indices()::putMapping, - highLevelClient().indices()::putMappingAsync, - expectWarnings(RestPutMappingAction.TYPES_DEPRECATION_MESSAGE)); - assertTrue(putMappingResponse.isAcknowledged()); - - Map getIndexResponse = getAsMap(indexName); - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", - getIndexResponse)); - } - public void testGetMapping() throws IOException { String indexName = "test"; createIndex(indexName, Settings.EMPTY); @@ -602,44 +573,6 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { assertThat(mappings, equalTo(expected)); } - public void testGetMappingWithTypes() throws IOException { - String indexName = "test"; - createIndex(indexName, Settings.EMPTY); - - PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", "text"); - mappingBuilder.endObject().endObject().endObject(); - 
putMappingRequest.source(mappingBuilder); - - AcknowledgedResponse putMappingResponse = execute(putMappingRequest, - highLevelClient().indices()::putMapping, - highLevelClient().indices()::putMappingAsync); - assertTrue(putMappingResponse.isAcknowledged()); - - Map getIndexResponse = getAsMap(indexName); - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); - - org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest request = - new org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest().indices(indexName); - - org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse getMappingsResponse = execute( - request, - highLevelClient().indices()::getMapping, - highLevelClient().indices()::getMappingAsync, - expectWarnings(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE)); - - Map mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap(); - Map type = new HashMap<>(); - type.put("type", "text"); - Map field = new HashMap<>(); - field.put("field", type); - Map expected = new HashMap<>(); - expected.put("properties", field); - assertThat(mappings, equalTo(expected)); - } - public void testGetFieldMapping() throws IOException { String indexName = "test"; createIndex(indexName, Settings.EMPTY); @@ -673,42 +606,6 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metaData))); } - public void testGetFieldMappingWithTypes() throws IOException { - String indexName = "test"; - createIndex(indexName, Settings.EMPTY); - - PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", "text"); - mappingBuilder.endObject().endObject().endObject(); - putMappingRequest.source(mappingBuilder); - - AcknowledgedResponse putMappingResponse = - execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); - assertTrue(putMappingResponse.isAcknowledged()); - - org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest = - new org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest() - .indices(indexName) - .types("_doc") - .fields("field"); - - org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse getFieldMappingsResponse = - execute(getFieldMappingsRequest, - highLevelClient().indices()::getFieldMapping, - highLevelClient().indices()::getFieldMappingAsync, - expectWarnings(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE)); - - final Map - fieldMappingMap = getFieldMappingsResponse.mappings().get(indexName).get("_doc"); - - final org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData metaData = - new org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData("field", - new BytesArray("{\"field\":{\"type\":\"text\"}}")); - assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metaData))); - } - public void testDeleteIndex() throws IOException { { // Delete index if exists @@ -1553,8 +1450,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { @SuppressWarnings("unchecked") public void testPutTemplate() throws Exception { - PutIndexTemplateRequest putTemplateRequest = 
new PutIndexTemplateRequest("my-template") - .patterns(Arrays.asList("pattern-1", "name-*")) + PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest("my-template", List.of("pattern-1", "name-*")) .order(10) .create(randomBoolean()) .settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0")) @@ -1580,8 +1476,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { } public void testPutTemplateWithTypesUsingUntypedAPI() throws Exception { - PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest("my-template") - .patterns(Arrays.asList("pattern-1", "name-*")) + PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest("my-template", List.of("pattern-1", "name-*")) .order(10) .create(randomBoolean()) .settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0")) @@ -1643,13 +1538,12 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { RestHighLevelClient client = highLevelClient(); // Failed to validate because index patterns are missing - PutIndexTemplateRequest withoutPattern = new PutIndexTemplateRequest("t1"); - ValidationException withoutPatternError = expectThrows(ValidationException.class, - () -> execute(withoutPattern, client.indices()::putTemplate, client.indices()::putTemplateAsync)); - assertThat(withoutPatternError.validationErrors(), contains("index patterns are missing")); + IllegalArgumentException withoutPatternError = expectThrows(IllegalArgumentException.class, + () -> new PutIndexTemplateRequest("t1", randomBoolean() ? null : List.of())); + assertThat(withoutPatternError.getMessage(), containsString("index patterns are missing")); // Create-only specified but an template exists already - PutIndexTemplateRequest goodTemplate = new PutIndexTemplateRequest("t2").patterns(Arrays.asList("qa-*", "prod-*")); + PutIndexTemplateRequest goodTemplate = new PutIndexTemplateRequest("t2", List.of("qa-*", "prod-*")); assertTrue(execute(goodTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync).isAcknowledged()); goodTemplate.create(true); ElasticsearchException alreadyExistsError = expectThrows(ElasticsearchException.class, @@ -1660,8 +1554,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { assertTrue(execute(goodTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync).isAcknowledged()); // Rejected due to unknown settings - PutIndexTemplateRequest unknownSettingTemplate = new PutIndexTemplateRequest("t3") - .patterns(Collections.singletonList("any")) + PutIndexTemplateRequest unknownSettingTemplate = new PutIndexTemplateRequest("t3", List.of("any")) .settings(Settings.builder().put("this-setting-does-not-exist", 100)); ElasticsearchStatusException unknownSettingError = expectThrows(ElasticsearchStatusException.class, () -> execute(unknownSettingTemplate, client.indices()::putTemplate, client.indices()::putTemplateAsync)); @@ -1792,12 +1685,11 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { public void testCRUDIndexTemplate() throws Exception { RestHighLevelClient client = highLevelClient(); - PutIndexTemplateRequest putTemplate1 = new PutIndexTemplateRequest("template-1") - .patterns(Arrays.asList("pattern-1", "name-1")).alias(new Alias("alias-1")); + PutIndexTemplateRequest putTemplate1 = new PutIndexTemplateRequest("template-1", List.of("pattern-1", "name-1")) + .alias(new Alias("alias-1")); assertThat(execute(putTemplate1, client.indices()::putTemplate, 
client.indices()::putTemplateAsync).isAcknowledged(), equalTo(true)); - PutIndexTemplateRequest putTemplate2 = new PutIndexTemplateRequest("template-2") - .patterns(Arrays.asList("pattern-2", "name-2")) + PutIndexTemplateRequest putTemplate2 = new PutIndexTemplateRequest("template-2", List.of("pattern-2", "name-2")) .mapping("{\"properties\": { \"name\": { \"type\": \"text\" }}}", XContentType.JSON) .settings(Settings.builder().put("number_of_shards", "2").put("number_of_replicas", "0")); assertThat(execute(putTemplate2, client.indices()::putTemplate, client.indices()::putTemplateAsync) @@ -1872,8 +1764,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { { for (String suffix : Arrays.asList("1", "2")) { - final PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("template-" + suffix) - .patterns(Arrays.asList("pattern-" + suffix, "name-" + suffix)) + final PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("template-" + suffix, + List.of("pattern-" + suffix, "name-" + suffix)) .alias(new Alias("alias-" + suffix)); assertTrue(execute(putRequest, client.indices()::putTemplate, client.indices()::putTemplateAsync).isAcknowledged()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java index ee3ec3b50cc4..a8a400fdcf30 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java @@ -1088,8 +1088,8 @@ public class IndicesRequestConvertersTests extends ESTestCase { names.put("foo^bar", "foo%5Ebar"); PutIndexTemplateRequest putTemplateRequest = - new PutIndexTemplateRequest(ESTestCase.randomFrom(names.keySet())) - .patterns(Arrays.asList(ESTestCase.generateRandomStringArray(20, 100, false, false))); + new PutIndexTemplateRequest(ESTestCase.randomFrom(names.keySet()), + List.of(ESTestCase.generateRandomStringArray(20, 100, false, false))); if (ESTestCase.randomBoolean()) { putTemplateRequest.order(ESTestCase.randomInt()); } @@ -1116,7 +1116,7 @@ public class IndicesRequestConvertersTests extends ESTestCase { putTemplateRequest.cause(cause); expectedParams.put("cause", cause); } - RequestConvertersTests.setRandomMasterTimeout(putTemplateRequest, expectedParams); + RequestConvertersTests.setRandomMasterTimeout(putTemplateRequest::masterNodeTimeout, expectedParams); Request request = IndicesRequestConverters.putTemplate(putTemplateRequest); Assert.assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name()))); @@ -1125,7 +1125,6 @@ public class IndicesRequestConvertersTests extends ESTestCase { } public void testValidateQuery() throws Exception { String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5); - String[] types = ESTestCase.randomBoolean() ? 
ESTestCase.generateRandomStringArray(5, 5, false, false) : null; ValidateQueryRequest validateQueryRequest; if (ESTestCase.randomBoolean()) { validateQueryRequest = new ValidateQueryRequest(indices); @@ -1133,7 +1132,6 @@ public class IndicesRequestConvertersTests extends ESTestCase { validateQueryRequest = new ValidateQueryRequest(); validateQueryRequest.indices(indices); } - validateQueryRequest.types(types); Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomIndicesOptions(validateQueryRequest::indicesOptions, validateQueryRequest::indicesOptions, expectedParams); @@ -1147,9 +1145,6 @@ public class IndicesRequestConvertersTests extends ESTestCase { StringJoiner endpoint = new StringJoiner("/", "/", ""); if (indices != null && indices.length > 0) { endpoint.add(String.join(",", indices)); - if (types != null && types.length > 0) { - endpoint.add(String.join(",", types)); - } } endpoint.add("_validate/query"); Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index cd41d7be2b9f..9d1e04eb5630 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -872,7 +872,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { // b) is slightly more efficient since we may not need to wait an entire second for the timestamp to increment assertBusy(() -> { long timeNow = System.currentTimeMillis() / 1000; - assertFalse(prevJobTimeStamp >= timeNow); + assertThat(prevJobTimeStamp, lessThan(timeNow)); }); // Update snapshot timestamp to force it out of snapshot retention window @@ -890,7 +890,8 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { waitForForecastToComplete(jobId, forecastJobResponse.getForecastId()); // Wait for the forecast to expire - awaitBusy(() -> false, 1, TimeUnit.SECONDS); + // FIXME: We should wait for something specific to change, rather than waiting for time to pass. + waitUntil(() -> false, 1, TimeUnit.SECONDS); // Run up to now startDatafeed(datafeedId, String.valueOf(0), String.valueOf(nowMillis)); @@ -934,7 +935,9 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { assertTrue(response.getDeleted()); - awaitBusy(() -> false, 1, TimeUnit.SECONDS); + // Wait for the forecast to expire + // FIXME: We should wait for something specific to change, rather than waiting for time to pass. 
+        waitUntil(() -> false, 1, TimeUnit.SECONDS);
         GetModelSnapshotsRequest getModelSnapshotsRequest1 = new GetModelSnapshotsRequest(jobId);
         GetModelSnapshotsResponse getModelSnapshotsResponse1 = execute(getModelSnapshotsRequest1,
             machineLearningClient::getModelSnapshots,
@@ -2049,8 +2052,6 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
         highLevelClient().update(updateSnapshotRequest, RequestOptions.DEFAULT);
     }
 
-
-
     private String createAndPutDatafeed(String jobId, String indexName) throws IOException {
         String datafeedId = jobId + "-feed";
         DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId)
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java
index 57b6e422bb1c..3396f3352e2c 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationIT.java
@@ -21,12 +21,10 @@ package org.elasticsearch.client;
 
 import org.elasticsearch.client.migration.DeprecationInfoRequest;
 import org.elasticsearch.client.migration.DeprecationInfoResponse;
-import org.elasticsearch.client.tasks.TaskSubmissionResponse;
 import org.elasticsearch.common.settings.Settings;
 
 import java.io.IOException;
 import java.util.Collections;
-import java.util.function.BooleanSupplier;
 
 import static org.hamcrest.Matchers.equalTo;
 
@@ -42,20 +40,4 @@ public class MigrationIT extends ESRestHighLevelClientTestCase {
         assertThat(response.getNodeSettingsIssues().size(), equalTo(0));
         assertThat(response.getMlSettingsIssues().size(), equalTo(0));
     }
-
-    /**
-     * Using low-level api as high-level-rest-client's getTaskById work is in progress.
-     * TODO revisit once that work is finished
-     */
-    private BooleanSupplier checkCompletionStatus(TaskSubmissionResponse upgrade) {
-        return () -> {
-            try {
-                Response response = client().performRequest(new Request("GET", "/_tasks/" + upgrade.getTask()));
-                return (boolean) entityAsMap(response).get("completed");
-            } catch (IOException e) {
-                fail(e.getMessage());
-                return false;
-            }
-        };
-    }
 }
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java
index f796e7068b7e..1c33a7e183e5 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ReindexIT.java
@@ -436,6 +436,47 @@ public class ReindexIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    public void testDeleteByQueryTask() throws Exception {
+        final String sourceIndex = "source456";
+        {
+            // Prepare
+            Settings settings = Settings.builder()
+                .put("number_of_shards", 1)
+                .put("number_of_replicas", 0)
+                .build();
+            createIndex(sourceIndex, settings);
+            assertEquals(
+                RestStatus.OK,
+                highLevelClient().bulk(
+                    new BulkRequest()
+                        .add(new IndexRequest(sourceIndex).id("1")
+                            .source(Collections.singletonMap("foo", 1), XContentType.JSON))
+                        .add(new IndexRequest(sourceIndex).id("2")
+                            .source(Collections.singletonMap("foo", 2), XContentType.JSON))
+                        .add(new IndexRequest(sourceIndex).id("3")
+                            .source(Collections.singletonMap("foo", 3), XContentType.JSON))
+                        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE),
+                    RequestOptions.DEFAULT
+                ).status()
+            );
+        }
+        {
+            // tag::submit-delete_by_query-task
+            DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
+            deleteByQueryRequest.indices(sourceIndex);
+            deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1"));
+            deleteByQueryRequest.setRefresh(true);
+
+            TaskSubmissionResponse deleteByQuerySubmission = highLevelClient()
+                .submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT);
+
+            String taskId = deleteByQuerySubmission.getTask();
+            // end::submit-delete_by_query-task
+
+            assertBusy(checkCompletionStatus(client(), taskId));
+        }
+    }
+
     private static TaskId findTaskToRethrottle(String actionName) throws IOException {
         long start = System.nanoTime();
         ListTasksRequest request = new ListTasksRequest();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index 436ae193ffcd..f75685b46fcb 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -71,7 +71,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.VersionType;
-import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.TermQueryBuilder;
@@ -158,7 +157,6 @@ public class RequestConvertersTests extends ESTestCase {
     }
 
     public void testSourceExistsWithType() throws IOException {
-        String type = frequently() ? randomAlphaOfLengthBetween(3, 10) : MapperService.SINGLE_MAPPING_NAME;
         doTestSourceExists((index, id) -> new GetRequest(index, id));
     }
 
@@ -362,17 +360,6 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals(method, request.getMethod());
     }
 
-    private static void getAndExistsWithTypeTest(Function<GetRequest, Request> requestConverter, String method) {
-        String index = randomAlphaOfLengthBetween(3, 10);
-        String id = randomAlphaOfLengthBetween(3, 10);
-        GetRequest getRequest = new GetRequest(index, id);
-
-        Request request = requestConverter.apply(getRequest);
-        assertEquals("/" + index + "/" + id, request.getEndpoint());
-        assertNull(request.getEntity());
-        assertEquals(method, request.getMethod());
-    }
-
     public void testReindex() throws IOException {
         ReindexRequest reindexRequest = new ReindexRequest();
         reindexRequest.setSourceIndices("source_idx");
@@ -538,6 +525,7 @@ public class RequestConvertersTests extends ESTestCase {
         }
         setRandomIndicesOptions(deleteByQueryRequest::setIndicesOptions, deleteByQueryRequest::indicesOptions, expectedParams);
         setRandomTimeout(deleteByQueryRequest::setTimeout, ReplicationRequest.DEFAULT_TIMEOUT, expectedParams);
+        expectedParams.put("wait_for_completion", Boolean.TRUE.toString());
         Request request = RequestConverters.deleteByQuery(deleteByQueryRequest);
         StringJoiner joiner = new StringJoiner("/", "/", "");
         joiner.add(String.join(",", deleteByQueryRequest.indices()));
@@ -1415,21 +1403,6 @@ public class RequestConvertersTests extends ESTestCase {
         assertToXContentBody(explainRequest, request.getEntity());
     }
 
-    public void testExplainWithType() throws IOException {
-        String index = randomAlphaOfLengthBetween(3, 10);
-        String type = randomAlphaOfLengthBetween(3, 10);
-        String id = randomAlphaOfLengthBetween(3, 10);
-
-        ExplainRequest explainRequest = new ExplainRequest(index, type, id);
-        explainRequest.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10)));
-
-        Request request = RequestConverters.explain(explainRequest);
-        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
-        assertEquals("/" + index + "/" + type + "/" + id + "/_explain", request.getEndpoint());
-
-        assertToXContentBody(explainRequest, request.getEntity());
-    }
-
     public void testTermVectors() throws IOException {
         String index = randomAlphaOfLengthBetween(3, 10);
         String id = randomAlphaOfLengthBetween(3, 10);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
index 1aed9b8a7c40..0498990ba237 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
@@ -65,6 +65,10 @@ import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.Binar
 import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
 import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
 import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
+import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;
+import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
+import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;
+import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding;
 import org.elasticsearch.client.transform.transforms.SyncConfig;
 import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
 import org.elasticsearch.common.CheckedFunction;
@@ -95,6 +99,7 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.InternalAggregationTestCase;
 import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
 import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
+
 import org.hamcrest.Matchers;
 import org.junit.Before;
 
@@ -676,7 +681,7 @@ public class RestHighLevelClientTests extends ESTestCase {
 
     public void testProvidedNamedXContents() {
         List<NamedXContentRegistry.Entry> namedXContents = RestHighLevelClient.getProvidedNamedXContents();
-        assertEquals(37, namedXContents.size());
+        assertEquals(41, namedXContents.size());
         Map<Class<?>, Integer> categories = new HashMap<>();
         List<String> names = new ArrayList<>();
         for (NamedXContentRegistry.Entry namedXContent : namedXContents) {
@@ -686,7 +691,7 @@ public class RestHighLevelClientTests extends ESTestCase {
                 categories.put(namedXContent.categoryClass, counter + 1);
             }
         }
-        assertEquals("Had: " + categories, 9, categories.size());
+        assertEquals("Had: " + categories, 11, categories.size());
         assertEquals(Integer.valueOf(3), categories.get(Aggregation.class));
         assertTrue(names.contains(ChildrenAggregationBuilder.NAME));
         assertTrue(names.contains(MatrixStatsAggregationBuilder.NAME));
@@ -733,6 +738,10 @@ public class RestHighLevelClientTests extends ESTestCase {
             ConfusionMatrixMetric.NAME,
             MeanSquaredErrorMetric.NAME,
             RSquaredMetric.NAME));
+        assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.inference.preprocessing.PreProcessor.class));
+        assertThat(names, hasItems(FrequencyEncoding.NAME, OneHotEncoding.NAME, TargetMeanEncoding.NAME));
+        assertEquals(Integer.valueOf(1), categories.get(org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel.class));
+        assertThat(names, hasItems(Tree.NAME));
     }
 
     public void testApiNamingConventions() throws Exception {
@@ -838,7 +847,7 @@ public class RestHighLevelClientTests extends ESTestCase {
                     apiName.startsWith("index_lifecycle.") == false &&
                     apiName.startsWith("ccr.") == false &&
                     apiName.startsWith("enrich.") == false &&
-                    apiName.startsWith("data_frame") == false &&
+                    apiName.startsWith("transform.") == false &&
                     apiName.endsWith("freeze") == false &&
                     apiName.endsWith("reload_analyzers") == false &&
                     // IndicesClientIT.getIndexTemplate should be renamed "getTemplate" in version 8.0 when we
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
index cf0772d8170e..adcb9083c2c5 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
@@ -227,7 +227,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         assertEquals(5, searchResponse.getHits().getHits().length);
         for (SearchHit searchHit : searchResponse.getHits().getHits()) {
             assertEquals("index", searchHit.getIndex());
-            assertEquals("type", searchHit.getType());
             assertThat(Integer.valueOf(searchHit.getId()), both(greaterThan(0)).and(lessThan(6)));
             assertEquals(1.0f, searchHit.getScore(), 0);
             assertEquals(-1L, searchHit.getVersion());
@@ -252,7 +251,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         assertThat(searchResponse.getHits().getMaxScore(), greaterThan(0f));
         SearchHit searchHit = searchResponse.getHits().getHits()[0];
         assertEquals("index", searchHit.getIndex());
-        assertEquals("type", searchHit.getType());
         assertEquals("1", searchHit.getId());
         assertThat(searchHit.getScore(), greaterThan(0f));
         assertEquals(-1L, searchHit.getVersion());
@@ -1093,7 +1091,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
 
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
         assertTrue(explainResponse.isExists());
         assertTrue(explainResponse.isMatch());
@@ -1108,7 +1105,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
 
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
         assertTrue(explainResponse.isExists());
         assertTrue(explainResponse.isMatch());
@@ -1123,7 +1119,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
 
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
         assertTrue(explainResponse.isExists());
         assertFalse(explainResponse.isMatch());
@@ -1139,7 +1134,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
 
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1));
         assertTrue(explainResponse.isExists());
         assertFalse(explainResponse.isMatch());
@@ -1167,7 +1161,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
 
         ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync);
         assertThat(explainResponse.getIndex(), equalTo("index1"));
-        assertThat(explainResponse.getType(), equalTo("_doc"));
         assertThat(explainResponse.getId(), equalTo("999"));
         assertFalse(explainResponse.isExists());
         assertFalse(explainResponse.isMatch());
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java
index abf65d19df3b..8122ff17648b 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityIT.java
@@ -190,7 +190,7 @@ public class SecurityIT extends ESRestHighLevelClientTestCase {
             .name(roleName)
             .clusterPrivileges(randomSubsetOf(randomInt(3), Role.ClusterPrivilegeName.ALL_ARRAY))
             .indicesPrivileges(
-                randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom(randomAlphaOfLength(3))))
+                randomArray(3, IndicesPrivileges[]::new, () -> IndicesPrivilegesTests.createNewRandom("{\"match_all\": {}}")))
             .applicationResourcePrivileges(randomArray(3, ApplicationResourcePrivileges[]::new,
                 () -> ApplicationResourcePrivilegesTests.createNewRandom(randomAlphaOfLength(3).toLowerCase(Locale.ROOT))))
             .runAsPrivilege(randomArray(3, String[]::new, () -> randomAlphaOfLength(3)));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java
similarity index 59%
rename from client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java
rename to client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java
index b532b877fa95..23771c2bff92 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/DataFrameTransformIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/TransformIT.java
@@ -28,24 +28,24 @@ import org.elasticsearch.client.core.AcknowledgedResponse;
 import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.indices.CreateIndexRequest;
 import org.elasticsearch.client.indices.CreateIndexResponse;
-import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
-import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
-import org.elasticsearch.client.transform.GetDataFrameTransformResponse;
-import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
-import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse;
-import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
-import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse;
-import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
-import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
-import org.elasticsearch.client.transform.StartDataFrameTransformResponse;
-import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
-import org.elasticsearch.client.transform.StopDataFrameTransformResponse;
-import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
-import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse;
-import org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformStats;
+import org.elasticsearch.client.transform.DeleteTransformRequest;
+import org.elasticsearch.client.transform.GetTransformRequest;
+import org.elasticsearch.client.transform.GetTransformResponse;
+import org.elasticsearch.client.transform.GetTransformStatsRequest;
+import org.elasticsearch.client.transform.GetTransformStatsResponse;
+import org.elasticsearch.client.transform.PreviewTransformRequest;
+import org.elasticsearch.client.transform.PreviewTransformResponse;
+import org.elasticsearch.client.transform.PutTransformRequest;
+import org.elasticsearch.client.transform.StartTransformRequest;
+import org.elasticsearch.client.transform.StartTransformResponse;
+import org.elasticsearch.client.transform.StopTransformRequest;
+import org.elasticsearch.client.transform.StopTransformResponse;
+import org.elasticsearch.client.transform.UpdateTransformRequest;
+import org.elasticsearch.client.transform.UpdateTransformResponse;
+import org.elasticsearch.client.transform.transforms.TransformIndexerStats;
+import org.elasticsearch.client.transform.transforms.TransformConfig;
+import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
+import org.elasticsearch.client.transform.transforms.TransformStats;
 import org.elasticsearch.client.transform.transforms.DestConfig;
 import org.elasticsearch.client.transform.transforms.SourceConfig;
 import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
@@ -78,7 +78,7 @@ import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.oneOf;
 
-public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
+public class TransformIT extends ESRestHighLevelClientTestCase {
 
     private List<String> transformsToClean = new ArrayList<>();
 
@@ -146,13 +146,13 @@
     @After
    public void cleanUpTransforms() throws Exception {
         for (String transformId : transformsToClean) {
-            highLevelClient().dataFrame().stopDataFrameTransform(
-                new StopDataFrameTransformRequest(transformId, Boolean.TRUE, null), RequestOptions.DEFAULT);
+            highLevelClient().transform().stopTransform(
+                new StopTransformRequest(transformId, Boolean.TRUE, null), RequestOptions.DEFAULT);
         }
 
         for (String transformId : transformsToClean) {
-            highLevelClient().dataFrame().deleteDataFrameTransform(
-                new DeleteDataFrameTransformRequest(transformId), RequestOptions.DEFAULT);
+            highLevelClient().transform().deleteTransform(
+                new DeleteTransformRequest(transformId), RequestOptions.DEFAULT);
         }
 
         transformsToClean = new ArrayList<>();
@@ -164,21 +164,21 @@
         createIndex(sourceIndex);
 
         String id = "test-crud";
-        DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
+        TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
 
-        DataFrameClient client = highLevelClient().dataFrame();
-        AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
-            client::putDataFrameTransformAsync);
+        TransformClient client = highLevelClient().transform();
+        AcknowledgedResponse ack = execute(new PutTransformRequest(transform), client::putTransform,
+            client::putTransformAsync);
         assertTrue(ack.isAcknowledged());
 
-        ack = execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform,
-            client::deleteDataFrameTransformAsync);
+        ack = execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
+            client::deleteTransformAsync);
         assertTrue(ack.isAcknowledged());
 
         // The second delete should fail
         ElasticsearchStatusException deleteError = expectThrows(ElasticsearchStatusException.class,
-            () -> execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform,
-                client::deleteDataFrameTransformAsync));
+            () -> execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
+                client::deleteTransformAsync));
         assertThat(deleteError.getMessage(), containsString("Transform with id [test-crud] could not be found"));
     }
 
@@ -187,25 +187,25 @@
         createIndex(sourceIndex);
 
         String id = "test-update";
-        DataFrameTransformConfig transform = validDataFrameTransformConfigBuilder(id, sourceIndex, "pivot-dest")
+        TransformConfig transform = validDataFrameTransformConfigBuilder(id, sourceIndex, "pivot-dest")
             .setSyncConfig(new TimeSyncConfig("timefield", TimeValue.timeValueSeconds(60)))
             .build();
 
-        DataFrameClient client = highLevelClient().dataFrame();
-        AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,
-            client::putDataFrameTransformAsync);
+        TransformClient client = highLevelClient().transform();
+        AcknowledgedResponse ack = execute(new PutTransformRequest(transform), client::putTransform,
+            client::putTransformAsync);
         assertTrue(ack.isAcknowledged());
 
         String updatedDescription = "my new description";
-        DataFrameTransformConfigUpdate update = DataFrameTransformConfigUpdate.builder().setDescription(updatedDescription).build();
-        UpdateDataFrameTransformResponse response = execute(
-            new UpdateDataFrameTransformRequest(update, id), client::updateDataFrameTransform,
-            client::updateDataFrameTransformAsync);
+        TransformConfigUpdate update = TransformConfigUpdate.builder().setDescription(updatedDescription).build();
+        UpdateTransformResponse response = execute(
+            new UpdateTransformRequest(update, id), client::updateTransform,
+            client::updateTransformAsync);
         assertThat(response.getTransformConfiguration().getDescription(), equalTo(updatedDescription));
 
         ElasticsearchStatusException updateError = expectThrows(ElasticsearchStatusException.class,
-            () -> execute(new UpdateDataFrameTransformRequest(update, "missing-transform"), client::updateDataFrameTransform,
-                client::updateDataFrameTransformAsync));
+            () -> execute(new UpdateTransformRequest(update, "missing-transform"), client::updateTransform,
+                client::updateTransformAsync));
         assertThat(updateError.getMessage(), containsString("Transform with id [missing-transform] could not be found"));
     }
 
@@ -213,15 +213,15 @@
         String sourceIndex = "missing-source-index";
 
         String id = "test-with-defer";
-        DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
-        DataFrameClient client = highLevelClient().dataFrame();
-        PutDataFrameTransformRequest request = new PutDataFrameTransformRequest(transform);
+        TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
+        TransformClient client = highLevelClient().transform();
+        PutTransformRequest request = new PutTransformRequest(transform);
         request.setDeferValidation(true);
-        AcknowledgedResponse ack = execute(request, client::putDataFrameTransform, client::putDataFrameTransformAsync);
+        AcknowledgedResponse ack = execute(request, client::putTransform, client::putTransformAsync);
         assertTrue(ack.isAcknowledged());
 
-        ack = execute(new DeleteDataFrameTransformRequest(transform.getId()), client::deleteDataFrameTransform,
-            client::deleteDataFrameTransformAsync);
+        ack = execute(new DeleteTransformRequest(transform.getId()), client::deleteTransform,
+            client::deleteTransformAsync);
         assertTrue(ack.isAcknowledged());
     }
 
@@ -230,14 +230,14 @@
         createIndex(sourceIndex);
 
         String id = "test-get";
-        DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
+        TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
 
-        DataFrameClient client = highLevelClient().dataFrame();
+        TransformClient client = highLevelClient().transform();
         putTransform(transform);
 
-        GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest(id);
-        GetDataFrameTransformResponse getResponse = execute(getRequest, client::getDataFrameTransform,
-            client::getDataFrameTransformAsync);
+        GetTransformRequest getRequest = new GetTransformRequest(id);
+        GetTransformResponse getResponse = execute(getRequest, client::getTransform,
+            client::getTransformAsync);
         assertNull(getResponse.getInvalidTransforms());
         assertThat(getResponse.getTransformConfigurations(), hasSize(1));
         assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(0).getId());
@@ -247,40 +247,40 @@
         String sourceIndex = "transform-source";
         createIndex(sourceIndex);
 
-        DataFrameClient client = highLevelClient().dataFrame();
+        TransformClient client = highLevelClient().transform();
 
-        DataFrameTransformConfig transform = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1");
+        TransformConfig transform = validDataFrameTransformConfig("test-get-all-1", sourceIndex, "pivot-dest-1");
         putTransform(transform);
 
         transform = validDataFrameTransformConfig("test-get-all-2", sourceIndex, "pivot-dest-2");
         putTransform(transform);
 
-        GetDataFrameTransformRequest getRequest = new GetDataFrameTransformRequest("_all");
-        GetDataFrameTransformResponse getResponse = execute(getRequest, client::getDataFrameTransform,
-            client::getDataFrameTransformAsync);
+        GetTransformRequest getRequest = new GetTransformRequest("_all");
+        GetTransformResponse getResponse = execute(getRequest, client::getTransform,
+            client::getTransformAsync);
         assertNull(getResponse.getInvalidTransforms());
         assertThat(getResponse.getTransformConfigurations(), hasSize(2));
         assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(1).getId());
 
         getRequest.setPageParams(new PageParams(0,1));
-        getResponse = execute(getRequest, client::getDataFrameTransform,
-            client::getDataFrameTransformAsync);
+        getResponse = execute(getRequest, client::getTransform,
+            client::getTransformAsync);
         assertNull(getResponse.getInvalidTransforms());
         assertThat(getResponse.getTransformConfigurations(), hasSize(1));
 
-        GetDataFrameTransformRequest getMulitple = new GetDataFrameTransformRequest("test-get-all-1", "test-get-all-2");
-        getResponse = execute(getMulitple, client::getDataFrameTransform,
-            client::getDataFrameTransformAsync);
+        GetTransformRequest getMultiple = new GetTransformRequest("test-get-all-1", "test-get-all-2");
+        getResponse = execute(getMultiple, client::getTransform,
+            client::getTransformAsync);
         assertNull(getResponse.getInvalidTransforms());
         assertThat(getResponse.getTransformConfigurations(), hasSize(2));
     }
 
     public void testGetMissingTransform() {
-        DataFrameClient client = highLevelClient().dataFrame();
+        TransformClient client = highLevelClient().transform();
 
         ElasticsearchStatusException missingError = expectThrows(ElasticsearchStatusException.class,
-            () -> execute(new GetDataFrameTransformRequest("unknown"), client::getDataFrameTransform,
-                client::getDataFrameTransformAsync));
+            () -> execute(new GetTransformRequest("unknown"), client::getTransform,
+                client::getTransformAsync));
         assertThat(missingError.status(), equalTo(RestStatus.NOT_FOUND));
     }
 
@@ -289,39 +289,39 @@
         createIndex(sourceIndex);
 
         String id = "test-stop-start";
-        DataFrameTransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
+        TransformConfig transform = validDataFrameTransformConfig(id, sourceIndex, "pivot-dest");
 
-        DataFrameClient client = highLevelClient().dataFrame();
+        TransformClient client = highLevelClient().transform();
         putTransform(transform);
 
-        StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id);
-        StartDataFrameTransformResponse startResponse =
-            execute(startRequest, client::startDataFrameTransform, client::startDataFrameTransformAsync);
+        StartTransformRequest startRequest = new StartTransformRequest(id);
+        StartTransformResponse startResponse =
+            execute(startRequest, client::startTransform, client::startTransformAsync);
         assertTrue(startResponse.isAcknowledged());
         assertThat(startResponse.getNodeFailures(), empty());
         assertThat(startResponse.getTaskFailures(), empty());
 
-        GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
-            client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
+        GetTransformStatsResponse statsResponse = execute(new GetTransformStatsRequest(id),
+            client::getTransformStats, client::getTransformStatsAsync);
         assertThat(statsResponse.getTransformsStats(), hasSize(1));
-        DataFrameTransformStats.State taskState = statsResponse.getTransformsStats().get(0).getState();
+        TransformStats.State taskState = statsResponse.getTransformsStats().get(0).getState();
 
         // Since we are non-continuous, the transform could auto-stop between being started earlier and us gathering the statistics
-        assertThat(taskState, oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING,
-            DataFrameTransformStats.State.STOPPING, DataFrameTransformStats.State.STOPPED));
+        assertThat(taskState, oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING,
+            TransformStats.State.STOPPING, TransformStats.State.STOPPED));
 
-        StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null);
-        StopDataFrameTransformResponse stopResponse =
-            execute(stopRequest, client::stopDataFrameTransform, client::stopDataFrameTransformAsync);
+        StopTransformRequest stopRequest = new StopTransformRequest(id, Boolean.TRUE, null);
+        StopTransformResponse stopResponse =
+            execute(stopRequest, client::stopTransform, client::stopTransformAsync);
         assertTrue(stopResponse.isAcknowledged());
         assertThat(stopResponse.getNodeFailures(), empty());
         assertThat(stopResponse.getTaskFailures(), empty());
 
         // Calling stop with wait_for_completion assures that we will be in the `STOPPED` state for the transform task
-        statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
-            client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
+        statsResponse = execute(new GetTransformStatsRequest(id),
+            client::getTransformStats, client::getTransformStatsAsync);
         taskState = statsResponse.getTransformsStats().get(0).getState();
-        assertThat(taskState, is(DataFrameTransformStats.State.STOPPED));
+        assertThat(taskState, is(TransformStats.State.STOPPED));
     }
 
     @SuppressWarnings("unchecked")
@@ -330,12 +330,12 @@
         createIndex(sourceIndex);
         indexData(sourceIndex);
 
-        DataFrameTransformConfig transform = validDataFrameTransformConfig("test-preview", sourceIndex, null);
+        TransformConfig transform = validDataFrameTransformConfig("test-preview", sourceIndex, null);
 
-        DataFrameClient client = highLevelClient().dataFrame();
-        PreviewDataFrameTransformResponse preview = execute(new PreviewDataFrameTransformRequest(transform),
-            client::previewDataFrameTransform,
-            client::previewDataFrameTransformAsync);
+        TransformClient client = highLevelClient().transform();
+        PreviewTransformResponse preview = execute(new PreviewTransformRequest(transform),
+            client::previewTransform,
+            client::previewTransformAsync);
 
         List<Map<String, Object>> docs = preview.getDocs();
         assertThat(docs, hasSize(2));
@@ -354,11 +354,11 @@
         assertThat(fields.get("avg_rating"), equalTo(Map.of("type", "double")));
     }
 
-    private DataFrameTransformConfig validDataFrameTransformConfig(String id, String source, String destination) {
+    private TransformConfig validDataFrameTransformConfig(String id, String source, String destination) {
         return validDataFrameTransformConfigBuilder(id, source, destination).build();
     }
 
-    private DataFrameTransformConfig.Builder validDataFrameTransformConfigBuilder(String id, String source, String destination) {
+    private TransformConfig.Builder validDataFrameTransformConfigBuilder(String id, String source, String destination) {
         GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
             TermsGroupSource.builder().setField("user_id").build()).build();
         AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
@@ -367,7 +367,7 @@
 
         DestConfig destConfig = (destination != null) ? DestConfig.builder().setIndex(destination).build() : null;
 
-        return DataFrameTransformConfig.builder()
+        return TransformConfig.builder()
             .setId(id)
             .setSource(SourceConfig.builder().setIndex(source).setQuery(new MatchAllQueryBuilder()).build())
             .setDest(destConfig)
@@ -388,7 +388,7 @@
         PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregations(aggBuilder).build();
 
         String id = "test-get-stats";
-        DataFrameTransformConfig transform = DataFrameTransformConfig.builder()
+        TransformConfig transform = TransformConfig.builder()
             .setId(id)
             .setSource(SourceConfig.builder().setIndex(sourceIndex).setQuery(new MatchAllQueryBuilder()).build())
             .setDest(DestConfig.builder().setIndex("pivot-dest").build())
@@ -396,17 +396,17 @@
             .setDescription("transform for testing stats")
             .build();
 
-        DataFrameClient client = highLevelClient().dataFrame();
+        TransformClient client = highLevelClient().transform();
         putTransform(transform);
 
-        GetDataFrameTransformStatsResponse statsResponse = execute(new GetDataFrameTransformStatsRequest(id),
-            client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
+        GetTransformStatsResponse statsResponse = execute(new GetTransformStatsRequest(id),
+            client::getTransformStats, client::getTransformStatsAsync);
 
         assertEquals(1, statsResponse.getTransformsStats().size());
-        DataFrameTransformStats stats = statsResponse.getTransformsStats().get(0);
-        assertEquals(DataFrameTransformStats.State.STOPPED, stats.getState());
+        TransformStats stats = statsResponse.getTransformsStats().get(0);
+        assertEquals(TransformStats.State.STOPPED, stats.getState());
 
-        DataFrameIndexerTransformStats zeroIndexerStats = new DataFrameIndexerTransformStats(
+        TransformIndexerStats zeroIndexerStats = new TransformIndexerStats(
            0L,
             0L,
             0L,
@@ -423,25 +423,25 @@
         assertEquals(zeroIndexerStats, stats.getIndexerStats());
 
         // start the transform
-        StartDataFrameTransformResponse startTransformResponse = execute(new StartDataFrameTransformRequest(id),
-            client::startDataFrameTransform,
-            client::startDataFrameTransformAsync);
+        StartTransformResponse startTransformResponse = execute(new StartTransformRequest(id),
+            client::startTransform,
+            client::startTransformAsync);
         assertThat(startTransformResponse.isAcknowledged(), is(true));
         assertBusy(() -> {
-            GetDataFrameTransformStatsResponse response = execute(new GetDataFrameTransformStatsRequest(id),
-                client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);
-            DataFrameTransformStats stateAndStats = response.getTransformsStats().get(0);
+            GetTransformStatsResponse response = execute(new GetTransformStatsRequest(id),
+                client::getTransformStats, client::getTransformStatsAsync);
+            TransformStats stateAndStats = response.getTransformsStats().get(0);
             assertNotEquals(zeroIndexerStats, stateAndStats.getIndexerStats());
-            assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING,
-                DataFrameTransformStats.State.STOPPING, DataFrameTransformStats.State.STOPPED));
+            assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING,
+                TransformStats.State.STOPPING, TransformStats.State.STOPPED));
             assertThat(stateAndStats.getReason(), is(nullValue()));
         });
     }
 
-    void putTransform(DataFrameTransformConfig config) throws IOException {
-        DataFrameClient client = highLevelClient().dataFrame();
-        AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(config), client::putDataFrameTransform,
-            client::putDataFrameTransformAsync);
+    void putTransform(TransformConfig config) throws IOException {
+        TransformClient client = highLevelClient().transform();
+        AcknowledgedResponse ack = execute(new PutTransformRequest(config), client::putTransform,
+            client::putTransformAsync);
         assertTrue(ack.isAcknowledged());
         transformsToClean.add(config.getId());
     }
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
index 607a11590e52..601ff20030de 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
@@ -100,7 +100,6 @@ import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.rest.RestStatus;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -2144,8 +2143,10 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         RestHighLevelClient client = highLevelClient();
 
         // tag::put-template-request
-        PutIndexTemplateRequest request = new PutIndexTemplateRequest("my-template"); // <1>
-        request.patterns(Arrays.asList("pattern-1", "log-*")); // <2>
+        PutIndexTemplateRequest request = new PutIndexTemplateRequest(
+            "my-template", // <1>
+            List.of("pattern-1", "log-*") // <2>
+        );
         // end::put-template-request
 
         // tag::put-template-request-settings
@@ -2292,8 +2293,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
     public void testGetTemplates() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
-            PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("my-template");
-            putRequest.patterns(Arrays.asList("pattern-1", "log-*"));
+            PutIndexTemplateRequest putRequest =
+                new PutIndexTemplateRequest("my-template", List.of("pattern-1", "log-*"));
             putRequest.settings(Settings.builder().put("index.number_of_shards", 3).put("index.number_of_replicas", 1));
             putRequest.mapping(
                 "{\n" +
@@ -2357,8 +2358,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
     public void testTemplatesExist() throws Exception {
         final RestHighLevelClient client = highLevelClient();
         {
-            final PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("my-template");
-            putRequest.patterns(Collections.singletonList("foo"));
+            final PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("my-template",
+                List.of("foo"));
             assertTrue(client.indices().putTemplate(putRequest, RequestOptions.DEFAULT).isAcknowledged());
         }
 
@@ -2767,8 +2768,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
     public void testDeleteTemplate() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
-            PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("my-template");
-            putRequest.patterns(Arrays.asList("pattern-1", "log-*"));
+            PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("my-template",
+                List.of("pattern-1", "log-*"));
             putRequest.settings(Settings.builder().put("index.number_of_shards", 3));
             assertTrue(client.indices().putTemplate(putRequest, RequestOptions.DEFAULT).isAcknowledged());
         }
@@ -2793,8 +2794,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         assertThat(acknowledged, equalTo(true));
 
         {
-            PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("my-template");
-            putRequest.patterns(Arrays.asList("pattern-1", "log-*"));
+            PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("my-template",
+                List.of("pattern-1", "log-*"));
             putRequest.settings(Settings.builder().put("index.number_of_shards", 3));
             assertTrue(client.indices().putTemplate(putRequest, RequestOptions.DEFAULT).isAcknowledged());
         }
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TransformDocumentationIT.java
similarity index 74%
rename from client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java
rename to client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TransformDocumentationIT.java
index 723594bf498e..537ad29efa4e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DataFrameTransformDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TransformDocumentationIT.java
@@ -28,25 +28,25 @@ import org.elasticsearch.client.core.AcknowledgedResponse;
 import org.elasticsearch.client.core.PageParams;
 import org.elasticsearch.client.indices.CreateIndexRequest;
 import org.elasticsearch.client.indices.CreateIndexResponse;
-import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest;
-import org.elasticsearch.client.transform.GetDataFrameTransformRequest;
-import org.elasticsearch.client.transform.GetDataFrameTransformResponse;
-import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest;
-import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse;
-import org.elasticsearch.client.transform.PreviewDataFrameTransformRequest;
-import org.elasticsearch.client.transform.PreviewDataFrameTransformResponse;
-import org.elasticsearch.client.transform.PutDataFrameTransformRequest;
-import org.elasticsearch.client.transform.StartDataFrameTransformRequest;
-import org.elasticsearch.client.transform.StartDataFrameTransformResponse;
-import org.elasticsearch.client.transform.StopDataFrameTransformRequest;
-import org.elasticsearch.client.transform.StopDataFrameTransformResponse;
-import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest;
-import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse;
-import org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformProgress;
-import org.elasticsearch.client.transform.transforms.DataFrameTransformStats;
+import org.elasticsearch.client.transform.DeleteTransformRequest;
+import org.elasticsearch.client.transform.GetTransformRequest;
+import org.elasticsearch.client.transform.GetTransformResponse;
+import org.elasticsearch.client.transform.GetTransformStatsRequest;
+import org.elasticsearch.client.transform.GetTransformStatsResponse;
+import org.elasticsearch.client.transform.PreviewTransformRequest;
+import org.elasticsearch.client.transform.PreviewTransformResponse;
+import org.elasticsearch.client.transform.PutTransformRequest;
+import org.elasticsearch.client.transform.StartTransformRequest;
+import org.elasticsearch.client.transform.StartTransformResponse;
+import org.elasticsearch.client.transform.StopTransformRequest;
+import org.elasticsearch.client.transform.StopTransformResponse;
+import org.elasticsearch.client.transform.UpdateTransformRequest;
+import org.elasticsearch.client.transform.UpdateTransformResponse;
+import org.elasticsearch.client.transform.transforms.TransformIndexerStats;
+import org.elasticsearch.client.transform.transforms.TransformConfig;
+import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
+import org.elasticsearch.client.transform.transforms.TransformProgress;
+import org.elasticsearch.client.transform.transforms.TransformStats;
 import org.elasticsearch.client.transform.transforms.DestConfig;
 import org.elasticsearch.client.transform.transforms.NodeAttributes;
 import org.elasticsearch.client.transform.transforms.QueryConfig;
@@ -73,20 +73,20 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
 
-public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTestCase {
+public class TransformDocumentationIT extends ESRestHighLevelClientTestCase {
 
     private List<String> transformsToClean = new ArrayList<>();
 
     @After
     public void cleanUpTransforms() throws Exception {
         for (String transformId : transformsToClean) {
-            highLevelClient().dataFrame().stopDataFrameTransform(
-                new StopDataFrameTransformRequest(transformId, Boolean.TRUE, TimeValue.timeValueSeconds(20)), RequestOptions.DEFAULT);
+            highLevelClient().transform().stopTransform(
+                new StopTransformRequest(transformId, Boolean.TRUE, TimeValue.timeValueSeconds(20)), RequestOptions.DEFAULT);
         }
 
         for (String transformId : transformsToClean) {
-            highLevelClient().dataFrame().deleteDataFrameTransform(
-                new DeleteDataFrameTransformRequest(transformId), RequestOptions.DEFAULT);
+            highLevelClient().transform().deleteTransform(
+                new DeleteTransformRequest(transformId), RequestOptions.DEFAULT);
         }
 
         transformsToClean = new ArrayList<>();
@@ -116,7 +116,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         assertTrue(response.isAcknowledged());
     }
 
-    public void testPutDataFrameTransform() throws IOException, InterruptedException {
+    public void testPutTransform() throws IOException, InterruptedException {
         createIndex("source-index");
 
         RestHighLevelClient client = highLevelClient();
@@ -154,7 +154,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             .build();
         // end::put-transform-pivot-config
         // tag::put-transform-config
-        DataFrameTransformConfig transformConfig = DataFrameTransformConfig
+        TransformConfig transformConfig = TransformConfig
             .builder()
             .setId("reviewer-avg-rating") // <1>
             .setSource(sourceConfig) // <2>
@@ -167,14 +167,14 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
 
         {
             // tag::put-transform-request
-            PutDataFrameTransformRequest request =
-                new PutDataFrameTransformRequest(transformConfig); // <1>
+            PutTransformRequest request =
+                new PutTransformRequest(transformConfig); // <1>
             request.setDeferValidation(false); // <2>
             // end::put-transform-request
 
             // tag::put-transform-execute
             AcknowledgedResponse response =
-                client.dataFrame().putDataFrameTransform(
+                client.transform().putTransform(
                     request, RequestOptions.DEFAULT);
             // end::put-transform-execute
             transformsToClean.add(request.getConfig().getId());
@@ -182,13 +182,13 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             assertTrue(response.isAcknowledged());
         }
         {
-            DataFrameTransformConfig configWithDifferentId = DataFrameTransformConfig.builder()
+            TransformConfig configWithDifferentId = TransformConfig.builder()
                 .setId("reviewer-avg-rating2")
                 .setSource(transformConfig.getSource())
                 .setDest(transformConfig.getDestination())
                 .setPivotConfig(transformConfig.getPivotConfig())
                 .build();
-            PutDataFrameTransformRequest request = new PutDataFrameTransformRequest(configWithDifferentId);
+            PutTransformRequest request = new PutTransformRequest(configWithDifferentId);
 
             // tag::put-transform-execute-listener
             ActionListener<AcknowledgedResponse> listener =
@@ -210,7 +210,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             listener = new LatchedActionListener<>(listener, latch);
 
             // tag::put-transform-execute-async
-            client.dataFrame().putDataFrameTransformAsync(
+            client.transform().putTransformAsync(
                 request, RequestOptions.DEFAULT, listener); // <1>
             // end::put-transform-execute-async
@@ -231,7 +231,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
         PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
 
-        DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder()
+        TransformConfig transformConfig = TransformConfig.builder()
             .setId("my-transform-to-update")
             .setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build())
             .setDest(DestConfig.builder().setIndex("pivot-dest").build())
@@ -239,11 +239,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             .setSyncConfig(new TimeSyncConfig("time-field", TimeValue.timeValueSeconds(120)))
             .build();
 
-        client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
+        client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
         transformsToClean.add(transformConfig.getId());
 
         // tag::update-transform-config
-        DataFrameTransformConfigUpdate update = DataFrameTransformConfigUpdate
+        TransformConfigUpdate update = TransformConfigUpdate
            .builder()
             .setSource(SourceConfig.builder()
                 .setIndex("source-data")
@@ -260,32 +260,32 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
 
         {
             // tag::update-transform-request
-            UpdateDataFrameTransformRequest request =
-                new UpdateDataFrameTransformRequest(
+            UpdateTransformRequest request =
+                new UpdateTransformRequest(
                     update, // <1>
                     "my-transform-to-update"); // <2>
             request.setDeferValidation(false); // <3>
             // end::update-transform-request
 
             // tag::update-transform-execute
-            UpdateDataFrameTransformResponse response =
-                client.dataFrame().updateDataFrameTransform(request,
+            UpdateTransformResponse response =
+                client.transform().updateTransform(request,
                     RequestOptions.DEFAULT);
-            DataFrameTransformConfig updatedConfig =
+            TransformConfig updatedConfig =
                 response.getTransformConfiguration();
             // end::update-transform-execute
 
             assertThat(updatedConfig.getDescription(), equalTo("This is my updated transform"));
         }
         {
-            UpdateDataFrameTransformRequest request = new UpdateDataFrameTransformRequest(update,
+            UpdateTransformRequest request = new UpdateTransformRequest(update,
                 "my-transform-to-update");
 
             // tag::update-transform-execute-listener
-            ActionListener<UpdateDataFrameTransformResponse> listener =
-                new ActionListener<UpdateDataFrameTransformResponse>() {
+            ActionListener<UpdateTransformResponse> listener =
+                new ActionListener<UpdateTransformResponse>() {
                    @Override
-                    public void onResponse(UpdateDataFrameTransformResponse response) {
+                    public void onResponse(UpdateTransformResponse response) {
                         // <1>
                     }
@@ -301,7 +301,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             listener = new LatchedActionListener<>(listener, latch);
 
             // tag::update-transform-execute-async
-            client.dataFrame().updateDataFrameTransformAsync(
+            client.transform().updateTransformAsync(
                 request, RequestOptions.DEFAULT, listener); // <1>
             // end::update-transform-execute-async
@@ -322,20 +322,20 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
         PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
 
-        DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder()
+        TransformConfig transformConfig = TransformConfig.builder()
             .setId("mega-transform")
             .setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build())
             .setDest(DestConfig.builder().setIndex("pivot-dest").build())
             .setPivotConfig(pivotConfig)
             .build();
 
-        client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
+        client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
         transformsToClean.add(transformConfig.getId());
 
         {
             // tag::start-transform-request
-            StartDataFrameTransformRequest request =
-                new StartDataFrameTransformRequest("mega-transform"); // <1>
+            StartTransformRequest request =
+                new StartTransformRequest("mega-transform"); // <1>
             // end::start-transform-request
 
             // tag::start-transform-request-options
@@ -343,8 +343,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             // end::start-transform-request-options
 
             // tag::start-transform-execute
-            StartDataFrameTransformResponse response =
-                client.dataFrame().startDataFrameTransform(
+            StartTransformResponse response =
+                client.transform().startTransform(
                     request, RequestOptions.DEFAULT);
             // end::start-transform-execute
 
@@ -352,8 +352,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         }
         {
             // tag::stop-transform-request
-            StopDataFrameTransformRequest request =
-                new StopDataFrameTransformRequest("mega-transform"); // <1>
+            StopTransformRequest request =
+                new StopTransformRequest("mega-transform"); // <1>
             // end::stop-transform-request
 
             // tag::stop-transform-request-options
@@ -363,8 +363,8 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             // end::stop-transform-request-options
 
             // tag::stop-transform-execute
-            StopDataFrameTransformResponse response =
-                client.dataFrame().stopDataFrameTransform(
+            StopTransformResponse response =
+                client.transform().stopTransform(
                     request, RequestOptions.DEFAULT);
             // end::stop-transform-execute
 
@@ -372,11 +372,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         }
         {
             // tag::start-transform-execute-listener
-            ActionListener<StartDataFrameTransformResponse> listener =
-                new ActionListener<StartDataFrameTransformResponse>() {
+            ActionListener<StartTransformResponse> listener =
+                new ActionListener<StartTransformResponse>() {
                     @Override
                     public void onResponse(
-                        StartDataFrameTransformResponse response) {
+                        StartTransformResponse response) {
                         // <1>
                     }
@@ -391,9 +391,9 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);
 
-            StartDataFrameTransformRequest request = new StartDataFrameTransformRequest("mega-transform");
+            StartTransformRequest request = new StartTransformRequest("mega-transform");
             // tag::start-transform-execute-async
-            client.dataFrame().startDataFrameTransformAsync(
+            client.transform().startTransformAsync(
                 request, RequestOptions.DEFAULT, listener); // <1>
             // end::start-transform-execute-async
@@ -401,11 +401,11 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         }
         {
             // tag::stop-transform-execute-listener
-            ActionListener<StopDataFrameTransformResponse> listener =
-                new ActionListener<StopDataFrameTransformResponse>() {
+            ActionListener<StopTransformResponse> listener =
+                new ActionListener<StopTransformResponse>() {
                     @Override
                     public void onResponse(
-                        StopDataFrameTransformResponse response) {
+                        StopTransformResponse response) {
                         // <1>
                     }
@@ -420,9 +420,9 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);
 
-            StopDataFrameTransformRequest request = new StopDataFrameTransformRequest("mega-transform");
+            StopTransformRequest request = new StopTransformRequest("mega-transform");
             // tag::stop-transform-execute-async
-            client.dataFrame().stopDataFrameTransformAsync(
+            client.transform().stopTransformAsync(
                 request, RequestOptions.DEFAULT, listener); // <1>
             // end::stop-transform-execute-async
@@ -442,7 +442,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
         PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
 
-        DataFrameTransformConfig transformConfig1 = DataFrameTransformConfig.builder()
+        TransformConfig transformConfig1 = TransformConfig.builder()
             .setId("mega-transform")
             .setSource(SourceConfig.builder()
                 .setIndex("source-data")
@@ -451,7 +451,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             .setDest(DestConfig.builder().setIndex("pivot-dest").build())
             .setPivotConfig(pivotConfig)
             .build();
-        DataFrameTransformConfig transformConfig2 = DataFrameTransformConfig.builder()
+        TransformConfig transformConfig2 = TransformConfig.builder()
             .setId("mega-transform2")
             .setSource(SourceConfig.builder()
                 .setIndex("source-data")
@@ -461,20 +461,20 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             .setPivotConfig(pivotConfig)
             .build();
 
-        client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig1), RequestOptions.DEFAULT);
-        client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig2), RequestOptions.DEFAULT);
+        client.transform().putTransform(new PutTransformRequest(transformConfig1), RequestOptions.DEFAULT);
+        client.transform().putTransform(new PutTransformRequest(transformConfig2), RequestOptions.DEFAULT);
 
         {
             // tag::delete-transform-request
-            DeleteDataFrameTransformRequest request =
-                new DeleteDataFrameTransformRequest("mega-transform"); // <1>
+            DeleteTransformRequest request =
+                new DeleteTransformRequest("mega-transform"); // <1>
             request.setForce(false); // <2>
             // end::delete-transform-request
 
             // tag::delete-transform-execute
             AcknowledgedResponse response =
-                client.dataFrame()
-                    .deleteDataFrameTransform(request, RequestOptions.DEFAULT);
+                client.transform()
+                    .deleteTransform(request, RequestOptions.DEFAULT);
             // end::delete-transform-execute
 
             assertTrue(response.isAcknowledged());
@@ -499,10 +499,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             final CountDownLatch latch = new CountDownLatch(1);
             listener = new LatchedActionListener<>(listener, latch);
 
-            DeleteDataFrameTransformRequest request = new DeleteDataFrameTransformRequest("mega-transform2");
+            DeleteTransformRequest request = new DeleteTransformRequest("mega-transform2");
 
             // tag::delete-transform-execute-async
-            client.dataFrame().deleteDataFrameTransformAsync(
+            client.transform().deleteTransformAsync(
                 request, RequestOptions.DEFAULT, listener); // <1>
             // end::delete-transform-execute-async
@@ -524,23 +524,23 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
 
         // tag::preview-transform-request
-        DataFrameTransformConfig transformConfig =
-            DataFrameTransformConfig.forPreview(
+        TransformConfig transformConfig =
+            TransformConfig.forPreview(
                 SourceConfig.builder()
                     .setIndex("source-data")
                     .setQueryConfig(queryConfig)
                     .build(), // <1>
                 pivotConfig); // <2>
-        PreviewDataFrameTransformRequest request =
-            new PreviewDataFrameTransformRequest(transformConfig); // <3>
+        PreviewTransformRequest request =
+            new PreviewTransformRequest(transformConfig); // <3>
         // end::preview-transform-request
 
         {
             // tag::preview-transform-execute
-            PreviewDataFrameTransformResponse response =
-                client.dataFrame()
-                    .previewDataFrameTransform(request, RequestOptions.DEFAULT);
+            PreviewTransformResponse response =
+                client.transform()
+                    .previewTransform(request, RequestOptions.DEFAULT);
             // end::preview-transform-execute
 
             assertNotNull(response.getDocs());
@@ -548,10 +548,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         }
         {
             // tag::preview-transform-execute-listener
-            ActionListener<PreviewDataFrameTransformResponse> listener =
-                new ActionListener<PreviewDataFrameTransformResponse>() {
+            ActionListener<PreviewTransformResponse> listener =
+                new ActionListener<PreviewTransformResponse>() {
                     @Override
-                    public void onResponse(PreviewDataFrameTransformResponse response) {
+                    public void onResponse(PreviewTransformResponse response) {
                         // <1>
                     }
@@ -567,7 +567,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             listener = new LatchedActionListener<>(listener, latch);
 
             // tag::preview-transform-execute-async
-            client.dataFrame().previewDataFrameTransformAsync(
+            client.transform().previewTransformAsync(
                 request, RequestOptions.DEFAULT, listener); // <1>
             // end::preview-transform-execute-async
@@ -588,7 +588,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
 
         String id = "statisitcal-transform";
-        DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder()
+        TransformConfig transformConfig = TransformConfig.builder()
             .setId(id)
             .setSource(SourceConfig.builder()
                 .setIndex("source-data")
@@ -597,12 +597,12 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             .setDest(DestConfig.builder().setIndex("pivot-dest").build())
             .setPivotConfig(pivotConfig)
             .build();
-        client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
+        client.transform().putTransform(new PutTransformRequest(transformConfig), RequestOptions.DEFAULT);
         transformsToClean.add(id);
 
         // tag::get-transform-stats-request
-        GetDataFrameTransformStatsRequest request =
-            new GetDataFrameTransformStatsRequest(id); // <1>
+        GetTransformStatsRequest request =
+            new GetTransformStatsRequest(id); // <1>
         // end::get-transform-stats-request
 
         // tag::get-transform-stats-request-options
@@ -612,38 +612,38 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
 
         {
             // tag::get-transform-stats-execute
-            GetDataFrameTransformStatsResponse response =
-                client.dataFrame()
-                    .getDataFrameTransformStats(request, RequestOptions.DEFAULT);
+            GetTransformStatsResponse response =
+                client.transform()
+                    .getTransformStats(request, RequestOptions.DEFAULT);
             // end::get-transform-stats-execute
 
             assertThat(response.getTransformsStats(), hasSize(1));
 
             // tag::get-transform-stats-response
-            DataFrameTransformStats stats =
+            TransformStats stats =
                 response.getTransformsStats().get(0); // <1>
-            DataFrameTransformStats.State state =
+            TransformStats.State state =
                 stats.getState(); // <2>
-            DataFrameIndexerTransformStats indexerStats =
+            TransformIndexerStats indexerStats =
                 stats.getIndexerStats(); // <3>
-            DataFrameTransformProgress progress =
+            TransformProgress progress =
                 stats.getCheckpointingInfo()
                     .getNext().getCheckpointProgress(); // <4>
             NodeAttributes node =
                 stats.getNode(); // <5>
             // end::get-transform-stats-response
 
-            assertEquals(DataFrameTransformStats.State.STOPPED, state);
+            assertEquals(TransformStats.State.STOPPED, state);
             assertNotNull(indexerStats);
             assertNull(progress);
         }
         {
             // tag::get-transform-stats-execute-listener
-            ActionListener<GetDataFrameTransformStatsResponse> listener =
-                new ActionListener<GetDataFrameTransformStatsResponse>() {
+            ActionListener<GetTransformStatsResponse> listener =
+                new ActionListener<GetTransformStatsResponse>() {
                     @Override
                     public void onResponse(
-                        GetDataFrameTransformStatsResponse response) {
+                        GetTransformStatsResponse response) {
                         // <1>
                     }
@@ -659,7 +659,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             listener = new LatchedActionListener<>(listener, latch);
 
             // tag::get-transform-stats-execute-async
-            client.dataFrame().getDataFrameTransformStatsAsync(
+            client.transform().getTransformStatsAsync(
                 request, RequestOptions.DEFAULT, listener); // <1>
             // end::get-transform-stats-execute-async
@@ -679,7 +679,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
         PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
 
-        DataFrameTransformConfig putTransformConfig = DataFrameTransformConfig.builder()
+        TransformConfig putTransformConfig = TransformConfig.builder()
             .setId("mega-transform")
             .setSource(SourceConfig.builder()
                 .setIndex("source-data")
@@ -690,13 +690,13 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
             .build();
 
         RestHighLevelClient client = highLevelClient();
-        client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(putTransformConfig), RequestOptions.DEFAULT);
+        client.transform().putTransform(new PutTransformRequest(putTransformConfig), RequestOptions.DEFAULT);
         transformsToClean.add(putTransformConfig.getId());
 
         {
             // tag::get-transform-request
-            GetDataFrameTransformRequest request =
-                new GetDataFrameTransformRequest("mega-transform"); // <1>
+            GetTransformRequest request =
+                new GetTransformRequest("mega-transform"); // <1>
             // end::get-transform-request
 
             // tag::get-transform-request-options
@@ -705,13 +705,13 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest // end::get-transform-request-options // tag::get-transform-execute - GetDataFrameTransformResponse response = - client.dataFrame() - .getDataFrameTransform(request, RequestOptions.DEFAULT); + GetTransformResponse response = + client.transform() + .getTransform(request, RequestOptions.DEFAULT); // end::get-transform-execute // tag::get-transform-response - List transformConfigs = + List transformConfigs = response.getTransformConfigurations(); // end::get-transform-response @@ -719,10 +719,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest } { // tag::get-transform-execute-listener - ActionListener listener = - new ActionListener() { + ActionListener listener = + new ActionListener() { @Override - public void onResponse(GetDataFrameTransformResponse response) { + public void onResponse(GetTransformResponse response) { // <1> } @@ -737,10 +737,10 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - GetDataFrameTransformRequest request = new GetDataFrameTransformRequest("mega-transform"); + GetTransformRequest request = new GetTransformRequest("mega-transform"); // tag::get-transform-execute-async - client.dataFrame().getDataFrameTransformAsync( + client.transform().getTransformAsync( request, RequestOptions.DEFAULT, listener); // <1> // end::get-transform-execute-async diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutIndexTemplateRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutIndexTemplateRequestTests.java index 8aab973982fc..9118690201e5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutIndexTemplateRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutIndexTemplateRequestTests.java @@ -18,52 +18,45 @@ */ package org.elasticsearch.client.indices; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.client.AbstractRequestTestCase; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.io.UncheckedIOException; -import java.util.Arrays; -import java.util.Collections; +import java.util.List; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.core.Is.is; -public class PutIndexTemplateRequestTests extends AbstractXContentTestCase { +public class PutIndexTemplateRequestTests extends AbstractRequestTestCase { + public void testValidateErrorMessage() throws Exception { - expectThrows(IllegalArgumentException.class, () -> new PutIndexTemplateRequest(null)); - expectThrows(IllegalArgumentException.class, () -> new PutIndexTemplateRequest("test").name(null)); - PutIndexTemplateRequest request = new PutIndexTemplateRequest("test"); - ActionRequestValidationException withoutPattern = 
request.validate(); - assertThat(withoutPattern.getMessage(), containsString("index patterns are missing")); - - request.name("foo"); - ActionRequestValidationException withoutIndexPatterns = request.validate(); - assertThat(withoutIndexPatterns.validationErrors(), hasSize(1)); - assertThat(withoutIndexPatterns.getMessage(), containsString("index patterns are missing")); - - request.patterns(Collections.singletonList("test-*")); - ActionRequestValidationException noError = request.validate(); - assertThat(noError, is(nullValue())); + expectThrows(IllegalArgumentException.class, () -> new PutIndexTemplateRequest(null, null)); + expectThrows(IllegalArgumentException.class, () -> new PutIndexTemplateRequest("test", List.of("index")).name(null)); + Exception e = expectThrows(IllegalArgumentException.class, () -> new PutIndexTemplateRequest("test", null)); + assertThat(e.getMessage(), containsString("index patterns are missing")); + e = expectThrows(IllegalArgumentException.class, () -> new PutIndexTemplateRequest("test", List.of())); + assertThat(e.getMessage(), containsString("index patterns are missing")); + new PutIndexTemplateRequest("test", List.of("index")); } @Override - protected PutIndexTemplateRequest createTestInstance() { - PutIndexTemplateRequest request = new PutIndexTemplateRequest("test"); + protected PutIndexTemplateRequest createClientTestInstance() { + PutIndexTemplateRequest request = new PutIndexTemplateRequest("test", + List.of(ESTestCase.generateRandomStringArray(20, 100, false, false))); if (randomBoolean()) { request.version(randomInt()); } if (randomBoolean()) { request.order(randomInt()); } - request.patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false))); int numAlias = between(0, 5); for (int i = 0; i < numAlias; i++) { // some ASCII or Latin-1 control characters, especially newline, can lead to @@ -80,9 +73,10 @@ public class PutIndexTemplateRequestTests extends AbstractXContentTestCase new DeleteModelSnapshotRequest(randomAlphaOfLength(10), null)); assertEquals("[snapshot_id] must not be null", ex.getMessage()); } - - private DeleteModelSnapshotRequest createTestInstance() { - return new DeleteModelSnapshotRequest(randomAlphaOfLength(10), randomAlphaOfLength(10)); - } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncodingTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncodingTests.java new file mode 100644 index 000000000000..8e9aa30930af --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncodingTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; + + +public class FrequencyEncodingTests extends AbstractXContentTestCase { + + @Override + protected FrequencyEncoding doParseInstance(XContentParser parser) throws IOException { + return FrequencyEncoding.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> !field.isEmpty(); + } + + @Override + protected FrequencyEncoding createTestInstance() { + return createRandom(); + } + + public static FrequencyEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false)); + } + return new FrequencyEncoding(randomAlphaOfLength(10), randomAlphaOfLength(10), valueMap); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncodingTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncodingTests.java new file mode 100644 index 000000000000..d8cd0d1f87a4 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncodingTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; + + +public class OneHotEncodingTests extends AbstractXContentTestCase { + + @Override + protected OneHotEncoding doParseInstance(XContentParser parser) throws IOException { + return OneHotEncoding.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> !field.isEmpty(); + } + + @Override + protected OneHotEncoding createTestInstance() { + return createRandom(); + } + + public static OneHotEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); + } + return new OneHotEncoding(randomAlphaOfLength(10), valueMap); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncodingTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncodingTests.java new file mode 100644 index 000000000000..8e751f752f8f --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncodingTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.preprocessing; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; + + +public class TargetMeanEncodingTests extends AbstractXContentTestCase { + + @Override + protected TargetMeanEncoding doParseInstance(XContentParser parser) throws IOException { + return TargetMeanEncoding.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> !field.isEmpty(); + } + + @Override + protected TargetMeanEncoding createTestInstance() { + return createRandom(); + } + + public static TargetMeanEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false)); + } + return new TargetMeanEncoding(randomAlphaOfLength(10), + randomAlphaOfLength(10), + valueMap, + randomDoubleBetween(0.0, 1.0, false)); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNodeTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNodeTests.java new file mode 100644 index 000000000000..733a9ddc3d94 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNodeTests.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.inference.trainedmodel.tree; + +import org.elasticsearch.client.ml.job.config.Operator; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class TreeNodeTests extends AbstractXContentTestCase { + + @Override + protected TreeNode doParseInstance(XContentParser parser) throws IOException { + return TreeNode.fromXContent(parser).build(); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected TreeNode createTestInstance() { + Integer lft = randomBoolean() ? null : randomInt(100); + Integer rgt = randomBoolean() ? randomInt(100) : null; + Double threshold = lft != null || randomBoolean() ? randomDouble() : null; + Integer featureIndex = lft != null || randomBoolean() ? randomInt(100) : null; + return createRandom(randomInt(), lft, rgt, threshold, featureIndex, randomBoolean() ? 
null : randomFrom(Operator.values())).build(); + } + + public static TreeNode createRandomLeafNode(double internalValue) { + return TreeNode.builder(randomInt(100)) + .setDefaultLeft(randomBoolean() ? null : randomBoolean()) + .setLeafValue(internalValue) + .build(); + } + + public static TreeNode.Builder createRandom(int nodeIndex, + Integer left, + Integer right, + Double threshold, + Integer featureIndex, + Operator operator) { + return TreeNode.builder(nodeIndex) + .setLeafValue(left == null ? randomDouble() : null) + .setDefaultLeft(randomBoolean() ? null : randomBoolean()) + .setLeftChild(left) + .setRightChild(right) + .setThreshold(threshold) + .setOperator(operator) + .setSplitFeature(featureIndex) + .setSplitGain(randomBoolean() ? null : randomDouble()); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeTests.java new file mode 100644 index 000000000000..66cdb44b1007 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeTests.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml.inference.trainedmodel.tree; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + + +public class TreeTests extends AbstractXContentTestCase { + + @Override + protected Tree doParseInstance(XContentParser parser) throws IOException { + return Tree.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> field.startsWith("feature_names"); + } + + @Override + protected Tree createTestInstance() { + return createRandom(); + } + + public static Tree createRandom() { + return buildRandomTree(randomIntBetween(2, 15), 6); + } + + public static Tree buildRandomTree(int numFeatures, int depth) { + + Tree.Builder builder = Tree.builder(); + List featureNames = new ArrayList<>(numFeatures); + for(int i = 0; i < numFeatures; i++) { + featureNames.add(randomAlphaOfLength(10)); + } + builder.setFeatureNames(featureNames); + + TreeNode.Builder node = builder.addJunction(0, randomInt(numFeatures), true, randomDouble()); + List childNodes = List.of(node.getLeftChild(), node.getRightChild()); + + for (int i = 0; i < depth -1; i++) { + + List nextNodes = new ArrayList<>(); + for (int nodeId : childNodes) { + if (i == depth -2) { + builder.addLeaf(nodeId, randomDouble()); + } else { + TreeNode.Builder childNode = + builder.addJunction(nodeId, randomInt(numFeatures), true, randomDouble()); + nextNodes.add(childNode.getLeftChild()); + nextNodes.add(childNode.getRightChild()); + } + } + childNodes = nextNodes; + } + + return builder.build(); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteTransformRequestTests.java similarity index 74% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteTransformRequestTests.java index dd20d513970a..bf70c426936d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/DeleteTransformRequestTests.java @@ -23,11 +23,11 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; -public class DeleteDataFrameTransformRequestTests extends ESTestCase { +public class DeleteTransformRequestTests extends ESTestCase { public void testValidate() { - assertFalse(new DeleteDataFrameTransformRequest("valid-id").validate().isPresent()); - assertThat(new DeleteDataFrameTransformRequest(null).validate().get().getMessage(), - containsString("data frame transform id must not be null")); + assertFalse(new DeleteTransformRequest("valid-id").validate().isPresent()); + assertThat(new DeleteTransformRequest(null).validate().get().getMessage(), + containsString("transform id must not be null")); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformRequestTests.java similarity index 
74% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformRequestTests.java index 044d880f8e8e..d9d9702219a7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformRequestTests.java @@ -23,10 +23,10 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; -public class GetDataFrameTransformRequestTests extends ESTestCase { +public class GetTransformRequestTests extends ESTestCase { public void testValidate() { - assertFalse(new GetDataFrameTransformRequest("valid-id").validate().isPresent()); - assertThat(new GetDataFrameTransformRequest(new String[0]).validate().get().getMessage(), - containsString("data frame transform id must not be null")); + assertFalse(new GetTransformRequest("valid-id").validate().isPresent()); + assertThat(new GetTransformRequest(new String[0]).validate().get().getMessage(), + containsString("transform id must not be null")); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformResponseTests.java similarity index 71% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformResponseTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformResponseTests.java index 08b43823c713..30453f98948f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformResponseTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.client.transform; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigTests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,32 +36,32 @@ import java.util.List; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class GetDataFrameTransformResponseTests extends ESTestCase { +public class GetTransformResponseTests extends ESTestCase { public void testXContentParser() throws IOException { xContentTester(this::createParser, - GetDataFrameTransformResponseTests::createTestInstance, - GetDataFrameTransformResponseTests::toXContent, - GetDataFrameTransformResponse::fromXContent) + GetTransformResponseTests::createTestInstance, + GetTransformResponseTests::toXContent, + GetTransformResponse::fromXContent) .supportsUnknownFields(false) .test(); } - private static GetDataFrameTransformResponse createTestInstance() { + private static GetTransformResponse createTestInstance() { int numTransforms = randomIntBetween(0, 3); - List transforms = new ArrayList<>(); + List transforms = new ArrayList<>(); for (int i=0; i invalidIds = Arrays.asList(generateRandomStringArray(5, 6, false, false)); - 
invalidTransforms = new GetDataFrameTransformResponse.InvalidTransforms(invalidIds); + invalidTransforms = new GetTransformResponse.InvalidTransforms(invalidIds); } - return new GetDataFrameTransformResponse(transforms, transforms.size() + 10, invalidTransforms); + return new GetTransformResponse(transforms, transforms.size() + 10, invalidTransforms); } - private static void toXContent(GetDataFrameTransformResponse response, XContentBuilder builder) throws IOException { + private static void toXContent(GetTransformResponse response, XContentBuilder builder) throws IOException { builder.startObject(); { builder.field("count", response.getCount()); @@ -80,7 +80,7 @@ public class GetDataFrameTransformResponseTests extends ESTestCase { protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsRequestTests.java similarity index 74% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsRequestTests.java index 68fd9ff88533..cb99d5d6081e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsRequestTests.java @@ -23,10 +23,10 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; -public class GetDataFrameTransformStatsRequestTests extends ESTestCase { +public class GetTransformStatsRequestTests extends ESTestCase { public void testValidate() { - assertFalse(new GetDataFrameTransformStatsRequest("valid-id").validate().isPresent()); - assertThat(new GetDataFrameTransformStatsRequest(null).validate().get().getMessage(), - containsString("data frame transform id must not be null")); + assertFalse(new GetTransformStatsRequest("valid-id").validate().isPresent()); + assertThat(new GetTransformStatsRequest(null).validate().get().getMessage(), + containsString("transform id must not be null")); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsResponseTests.java similarity index 70% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponseTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsResponseTests.java index d903215284a0..d87d0c861ba6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetDataFrameTransformStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/GetTransformStatsResponseTests.java @@ -21,8 +21,8 @@ package org.elasticsearch.client.transform; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStatsTests; +import org.elasticsearch.client.transform.transforms.TransformStats; +import org.elasticsearch.client.transform.transforms.TransformStatsTests; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; @@ -32,25 +32,25 @@ import java.util.List; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class GetDataFrameTransformStatsResponseTests extends ESTestCase { +public class GetTransformStatsResponseTests extends ESTestCase { public void testXContentParser() throws IOException { xContentTester(this::createParser, - GetDataFrameTransformStatsResponseTests::createTestInstance, - GetDataFrameTransformStatsResponseTests::toXContent, - GetDataFrameTransformStatsResponse::fromXContent) - .assertEqualsConsumer(GetDataFrameTransformStatsResponseTests::assertEqualInstances) + GetTransformStatsResponseTests::createTestInstance, + GetTransformStatsResponseTests::toXContent, + GetTransformStatsResponse::fromXContent) + .assertEqualsConsumer(GetTransformStatsResponseTests::assertEqualInstances) .assertToXContentEquivalence(false) .supportsUnknownFields(true) .randomFieldsExcludeFilter(path -> path.isEmpty() == false) .test(); } - private static GetDataFrameTransformStatsResponse createTestInstance() { + private static GetTransformStatsResponse createTestInstance() { int count = randomIntBetween(1, 3); - List stats = new ArrayList<>(); + List stats = new ArrayList<>(); for (int i=0; i taskFailures = null; @@ -66,19 +66,19 @@ public class GetDataFrameTransformStatsResponseTests extends ESTestCase { nodeFailures = new ArrayList<>(); int numNodeFailures = randomIntBetween(1, 4); for (int i=0; i { +public class PreviewTransformRequestTests extends AbstractXContentTestCase { @Override - protected PreviewDataFrameTransformRequest createTestInstance() { - return new PreviewDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + protected PreviewTransformRequest createTestInstance() { + return new PreviewTransformRequest(TransformConfigTests.randomTransformConfig()); } @Override - protected PreviewDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException { - return new PreviewDataFrameTransformRequest(DataFrameTransformConfig.fromXContent(parser)); + protected PreviewTransformRequest doParseInstance(XContentParser parser) throws IOException { + return new PreviewTransformRequest(TransformConfig.fromXContent(parser)); } @Override @@ -57,27 +57,27 @@ public class PreviewDataFrameTransformRequestTests extends AbstractXContentTestC protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } public void testValidate() { - assertFalse(new PreviewDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig()) + assertFalse(new PreviewTransformRequest(TransformConfigTests.randomTransformConfig()) .validate().isPresent()); - assertThat(new 
PreviewDataFrameTransformRequest(null).validate().get().getMessage(), - containsString("preview requires a non-null data frame config")); + assertThat(new PreviewTransformRequest(null).validate().get().getMessage(), + containsString("preview requires a non-null transform config")); // null id and destination is valid - DataFrameTransformConfig config = DataFrameTransformConfig.forPreview(randomSourceConfig(), PivotConfigTests.randomPivotConfig()); + TransformConfig config = TransformConfig.forPreview(randomSourceConfig(), PivotConfigTests.randomPivotConfig()); - assertFalse(new PreviewDataFrameTransformRequest(config).validate().isPresent()); + assertFalse(new PreviewTransformRequest(config).validate().isPresent()); // null source is not valid - config = DataFrameTransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build(); + config = TransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build(); - Optional error = new PreviewDataFrameTransformRequest(config).validate(); + Optional error = new PreviewTransformRequest(config).validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("data frame transform source cannot be null")); + assertThat(error.get().getMessage(), containsString("transform source cannot be null")); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java similarity index 86% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponseTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java index d7579bc285da..8e1dbefa127a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewDataFrameTransformResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PreviewTransformResponseTests.java @@ -30,19 +30,19 @@ import java.util.Map; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class PreviewDataFrameTransformResponseTests extends ESTestCase { +public class PreviewTransformResponseTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, this::createTestInstance, this::toXContent, - PreviewDataFrameTransformResponse::fromXContent) + PreviewTransformResponse::fromXContent) .supportsUnknownFields(true) .randomFieldsExcludeFilter(path -> path.isEmpty() == false) .test(); } - private PreviewDataFrameTransformResponse createTestInstance() { + private PreviewTransformResponse createTestInstance() { int numDocs = randomIntBetween(5, 10); List> docs = new ArrayList<>(numDocs); for (int i=0; i doc : response.getDocs()) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutTransformRequestTests.java similarity index 60% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutTransformRequestTests.java index a823f7405083..52c4770fb9fd 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/PutTransformRequestTests.java @@ -20,8 +20,8 @@ package org.elasticsearch.client.transform; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigTests; import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -36,32 +36,32 @@ import java.util.Optional; import static org.hamcrest.Matchers.containsString; -public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase { +public class PutTransformRequestTests extends AbstractXContentTestCase { public void testValidate() { assertFalse(createTestInstance().validate().isPresent()); - DataFrameTransformConfig config = DataFrameTransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build(); + TransformConfig config = TransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build(); - Optional error = new PutDataFrameTransformRequest(config).validate(); + Optional error = new PutTransformRequest(config).validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("data frame transform id cannot be null")); - assertThat(error.get().getMessage(), containsString("data frame transform source cannot be null")); - assertThat(error.get().getMessage(), containsString("data frame transform destination cannot be null")); + assertThat(error.get().getMessage(), containsString("transform id cannot be null")); + assertThat(error.get().getMessage(), containsString("transform source cannot be null")); + assertThat(error.get().getMessage(), containsString("transform destination cannot be null")); - error = new PutDataFrameTransformRequest(null).validate(); + error = new PutTransformRequest(null).validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("put requires a non-null data frame config")); + assertThat(error.get().getMessage(), containsString("put requires a non-null transform config")); } @Override - protected PutDataFrameTransformRequest createTestInstance() { - return new PutDataFrameTransformRequest(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + protected PutTransformRequest createTestInstance() { + return new PutTransformRequest(TransformConfigTests.randomTransformConfig()); } @Override - protected PutDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException { - return new PutDataFrameTransformRequest(DataFrameTransformConfig.fromXContent(parser)); + protected PutTransformRequest doParseInstance(XContentParser parser) throws IOException { + return new PutTransformRequest(TransformConfig.fromXContent(parser)); } @Override @@ -73,7 +73,7 @@ public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase< protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new 
DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StartDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StartDataFrameTransformRequestTests.java index f4950fc057a3..1ba5f6109126 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StartDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StartDataFrameTransformRequestTests.java @@ -28,14 +28,14 @@ import static org.hamcrest.Matchers.containsString; public class StartDataFrameTransformRequestTests extends ESTestCase { public void testValidate_givenNullId() { - StartDataFrameTransformRequest request = new StartDataFrameTransformRequest(null, null); + StartTransformRequest request = new StartTransformRequest(null, null); Optional validate = request.validate(); assertTrue(validate.isPresent()); - assertThat(validate.get().getMessage(), containsString("data frame transform id must not be null")); + assertThat(validate.get().getMessage(), containsString("transform id must not be null")); } public void testValidate_givenValid() { - StartDataFrameTransformRequest request = new StartDataFrameTransformRequest("foo", null); + StartTransformRequest request = new StartTransformRequest("foo", null); Optional validate = request.validate(); assertFalse(validate.isPresent()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopTransformRequestTests.java similarity index 82% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopTransformRequestTests.java index 5b28983c086a..155da3145829 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/StopTransformRequestTests.java @@ -26,16 +26,16 @@ import java.util.Optional; import static org.hamcrest.Matchers.containsString; -public class StopDataFrameTransformRequestTests extends ESTestCase { +public class StopTransformRequestTests extends ESTestCase { public void testValidate_givenNullId() { - StopDataFrameTransformRequest request = new StopDataFrameTransformRequest(null); + StopTransformRequest request = new StopTransformRequest(null); Optional validate = request.validate(); assertTrue(validate.isPresent()); - assertThat(validate.get().getMessage(), containsString("data frame transform id must not be null")); + assertThat(validate.get().getMessage(), containsString("transform id must not be null")); } public void testValidate_givenValid() { - StopDataFrameTransformRequest request = new StopDataFrameTransformRequest("foo"); + StopTransformRequest request = new StopTransformRequest("foo"); Optional validate = request.validate(); assertFalse(validate.isPresent()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponseTests.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponseTests.java index 52d612ec4b9a..7dd1313d14a0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformResponseTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.client.transform; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigTests; +import org.elasticsearch.client.transform.transforms.TransformConfigTests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -38,17 +38,17 @@ public class UpdateDataFrameTransformResponseTests extends ESTestCase { xContentTester(this::createParser, UpdateDataFrameTransformResponseTests::createTestInstance, UpdateDataFrameTransformResponseTests::toXContent, - UpdateDataFrameTransformResponse::fromXContent) + UpdateTransformResponse::fromXContent) .assertToXContentEquivalence(false) .supportsUnknownFields(false) .test(); } - private static UpdateDataFrameTransformResponse createTestInstance() { - return new UpdateDataFrameTransformResponse(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + private static UpdateTransformResponse createTestInstance() { + return new UpdateTransformResponse(TransformConfigTests.randomTransformConfig()); } - private static void toXContent(UpdateDataFrameTransformResponse response, XContentBuilder builder) throws IOException { + private static void toXContent(UpdateTransformResponse response, XContentBuilder builder) throws IOException { response.getTransformConfiguration().toXContent(builder, null); } @@ -56,7 +56,7 @@ public class UpdateDataFrameTransformResponseTests extends ESTestCase { protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateTransformRequestTests.java similarity index 63% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequestTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateTransformRequestTests.java index 46012ef4de26..df622cc1a47f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateDataFrameTransformRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/UpdateTransformRequestTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.client.transform; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import 
org.elasticsearch.common.xcontent.XContentParser; @@ -32,34 +32,34 @@ import java.util.Collections; import java.util.List; import java.util.Optional; -import static org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdateTests.randomDataFrameTransformConfigUpdate; +import static org.elasticsearch.client.transform.transforms.TransformConfigUpdateTests.randomTransformConfigUpdate; import static org.hamcrest.Matchers.containsString; -public class UpdateDataFrameTransformRequestTests extends AbstractXContentTestCase { +public class UpdateTransformRequestTests extends AbstractXContentTestCase { public void testValidate() { assertFalse(createTestInstance().validate().isPresent()); - DataFrameTransformConfigUpdate config = randomDataFrameTransformConfigUpdate(); + TransformConfigUpdate config = randomTransformConfigUpdate(); - Optional error = new UpdateDataFrameTransformRequest(config, null).validate(); + Optional error = new UpdateTransformRequest(config, null).validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("data frame transform id cannot be null")); + assertThat(error.get().getMessage(), containsString("transform id cannot be null")); - error = new UpdateDataFrameTransformRequest(null, "123").validate(); + error = new UpdateTransformRequest(null, "123").validate(); assertTrue(error.isPresent()); - assertThat(error.get().getMessage(), containsString("put requires a non-null data frame config")); + assertThat(error.get().getMessage(), containsString("put requires a non-null transform config")); } private final String transformId = randomAlphaOfLength(10); @Override - protected UpdateDataFrameTransformRequest createTestInstance() { - return new UpdateDataFrameTransformRequest(randomDataFrameTransformConfigUpdate(), transformId); + protected UpdateTransformRequest createTestInstance() { + return new UpdateTransformRequest(randomTransformConfigUpdate(), transformId); } @Override - protected UpdateDataFrameTransformRequest doParseInstance(XContentParser parser) throws IOException { - return new UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate.fromXContent(parser), transformId); + protected UpdateTransformRequest doParseInstance(XContentParser parser) throws IOException { + return new UpdateTransformRequest(TransformConfigUpdate.fromXContent(parser), transformId); } @Override @@ -71,7 +71,7 @@ public class UpdateDataFrameTransformRequestTests extends AbstractXContentTestCa protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStatsTests.java similarity index 51% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStatsTests.java index d03651170f76..0095302919e9 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointStatsTests.java @@ -26,38 +26,38 @@ import java.io.IOException; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameTransformCheckpointStatsTests extends ESTestCase { +public class TransformCheckpointStatsTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameTransformCheckpointStatsTests::randomDataFrameTransformCheckpointStats, - DataFrameTransformCheckpointStatsTests::toXContent, - DataFrameTransformCheckpointStats::fromXContent) + TransformCheckpointStatsTests::randomTransformCheckpointStats, + TransformCheckpointStatsTests::toXContent, + TransformCheckpointStats::fromXContent) .supportsUnknownFields(true) .randomFieldsExcludeFilter(field -> field.startsWith("position")) .test(); } - public static DataFrameTransformCheckpointStats randomDataFrameTransformCheckpointStats() { - return new DataFrameTransformCheckpointStats(randomLongBetween(1, 1_000_000), - randomBoolean() ? null : DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(), - randomBoolean() ? null : DataFrameTransformProgressTests.randomInstance(), + public static TransformCheckpointStats randomTransformCheckpointStats() { + return new TransformCheckpointStats(randomLongBetween(1, 1_000_000), + randomBoolean() ? null : TransformIndexerPositionTests.randomTransformIndexerPosition(), + randomBoolean() ? null : TransformProgressTests.randomInstance(), randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000)); } - public static void toXContent(DataFrameTransformCheckpointStats stats, XContentBuilder builder) throws IOException { + public static void toXContent(TransformCheckpointStats stats, XContentBuilder builder) throws IOException { builder.startObject(); - builder.field(DataFrameTransformCheckpointStats.CHECKPOINT.getPreferredName(), stats.getCheckpoint()); + builder.field(TransformCheckpointStats.CHECKPOINT.getPreferredName(), stats.getCheckpoint()); if (stats.getPosition() != null) { - builder.field(DataFrameTransformCheckpointStats.POSITION.getPreferredName()); - DataFrameIndexerPositionTests.toXContent(stats.getPosition(), builder); + builder.field(TransformCheckpointStats.POSITION.getPreferredName()); + TransformIndexerPositionTests.toXContent(stats.getPosition(), builder); } if (stats.getCheckpointProgress() != null) { - builder.field(DataFrameTransformCheckpointStats.CHECKPOINT_PROGRESS.getPreferredName()); - DataFrameTransformProgressTests.toXContent(stats.getCheckpointProgress(), builder); + builder.field(TransformCheckpointStats.CHECKPOINT_PROGRESS.getPreferredName()); + TransformProgressTests.toXContent(stats.getCheckpointProgress(), builder); } - builder.field(DataFrameTransformCheckpointStats.TIMESTAMP_MILLIS.getPreferredName(), stats.getTimestampMillis()); - builder.field(DataFrameTransformCheckpointStats.TIME_UPPER_BOUND_MILLIS.getPreferredName(), stats.getTimeUpperBoundMillis()); + builder.field(TransformCheckpointStats.TIMESTAMP_MILLIS.getPreferredName(), stats.getTimestampMillis()); + builder.field(TransformCheckpointStats.TIME_UPPER_BOUND_MILLIS.getPreferredName(), stats.getTimeUpperBoundMillis()); builder.endObject(); } } diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfoTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfoTests.java similarity index 54% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfoTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfoTests.java index 2ec042a4a6ad..326ecd73a152 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformCheckpointingInfoTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformCheckpointingInfoTests.java @@ -27,38 +27,38 @@ import java.time.Instant; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameTransformCheckpointingInfoTests extends ESTestCase { +public class TransformCheckpointingInfoTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameTransformCheckpointingInfoTests::randomDataFrameTransformCheckpointingInfo, - DataFrameTransformCheckpointingInfoTests::toXContent, - DataFrameTransformCheckpointingInfo::fromXContent) + TransformCheckpointingInfoTests::randomTransformCheckpointingInfo, + TransformCheckpointingInfoTests::toXContent, + TransformCheckpointingInfo::fromXContent) .supportsUnknownFields(false) .test(); } - public static DataFrameTransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() { - return new DataFrameTransformCheckpointingInfo( - DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(), - DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(), + public static TransformCheckpointingInfo randomTransformCheckpointingInfo() { + return new TransformCheckpointingInfo( + TransformCheckpointStatsTests.randomTransformCheckpointStats(), + TransformCheckpointStatsTests.randomTransformCheckpointStats(), randomLongBetween(0, 10000), randomBoolean() ? 
null : Instant.ofEpochMilli(randomNonNegativeLong())); } - public static void toXContent(DataFrameTransformCheckpointingInfo info, XContentBuilder builder) throws IOException { + public static void toXContent(TransformCheckpointingInfo info, XContentBuilder builder) throws IOException { builder.startObject(); if (info.getLast().getTimestampMillis() > 0) { - builder.field(DataFrameTransformCheckpointingInfo.LAST_CHECKPOINT.getPreferredName()); - DataFrameTransformCheckpointStatsTests.toXContent(info.getLast(), builder); + builder.field(TransformCheckpointingInfo.LAST_CHECKPOINT.getPreferredName()); + TransformCheckpointStatsTests.toXContent(info.getLast(), builder); } if (info.getNext().getTimestampMillis() > 0) { - builder.field(DataFrameTransformCheckpointingInfo.NEXT_CHECKPOINT.getPreferredName()); - DataFrameTransformCheckpointStatsTests.toXContent(info.getNext(), builder); + builder.field(TransformCheckpointingInfo.NEXT_CHECKPOINT.getPreferredName()); + TransformCheckpointStatsTests.toXContent(info.getNext(), builder); } - builder.field(DataFrameTransformCheckpointingInfo.OPERATIONS_BEHIND.getPreferredName(), info.getOperationsBehind()); + builder.field(TransformCheckpointingInfo.OPERATIONS_BEHIND.getPreferredName(), info.getOperationsBehind()); if (info.getChangesLastDetectedAt() != null) { - builder.field(DataFrameTransformCheckpointingInfo.CHANGES_LAST_DETECTED_AT.getPreferredName(), info.getChangesLastDetectedAt()); + builder.field(TransformCheckpointingInfo.CHANGES_LAST_DETECTED_AT.getPreferredName(), info.getChangesLastDetectedAt()); } builder.endObject(); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java similarity index 81% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java index 4ac30cf0bf85..ec271ead8545 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.client.transform.transforms; import org.elasticsearch.Version; -import org.elasticsearch.client.transform.DataFrameNamedXContentProvider; +import org.elasticsearch.client.transform.TransformNamedXContentProvider; import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -38,10 +38,10 @@ import java.util.function.Predicate; import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig; import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig; -public class DataFrameTransformConfigTests extends AbstractXContentTestCase { +public class TransformConfigTests extends AbstractXContentTestCase { - public static DataFrameTransformConfig randomDataFrameTransformConfig() { - return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), + public static TransformConfig randomTransformConfig() { + return new TransformConfig(randomAlphaOfLengthBetween(1, 10), randomSourceConfig(), randomDestConfig(), randomBoolean() ? 
null : TimeValue.timeValueMillis(randomIntBetween(1000, 1000000)), @@ -57,13 +57,13 @@ public class DataFrameTransformConfigTests extends AbstractXContentTestCase namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdateTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java similarity index 78% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdateTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java index f17be4ef7323..fe8261d7ba1a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformConfigUpdateTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.client.transform.transforms; -import org.elasticsearch.client.transform.DataFrameNamedXContentProvider; +import org.elasticsearch.client.transform.TransformNamedXContentProvider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -34,10 +34,10 @@ import java.util.List; import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig; import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig; -public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCase { +public class TransformConfigUpdateTests extends AbstractXContentTestCase { - public static DataFrameTransformConfigUpdate randomDataFrameTransformConfigUpdate() { - return new DataFrameTransformConfigUpdate( + public static TransformConfigUpdate randomTransformConfigUpdate() { + return new TransformConfigUpdate( randomBoolean() ? null : randomSourceConfig(), randomBoolean() ? null : randomDestConfig(), randomBoolean() ? 
null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)), @@ -50,8 +50,8 @@ public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCas } @Override - protected DataFrameTransformConfigUpdate doParseInstance(XContentParser parser) throws IOException { - return DataFrameTransformConfigUpdate.fromXContent(parser); + protected TransformConfigUpdate doParseInstance(XContentParser parser) throws IOException { + return TransformConfigUpdate.fromXContent(parser); } @Override @@ -60,15 +60,15 @@ public class DataFrameTransformConfigUpdateTests extends AbstractXContentTestCas } @Override - protected DataFrameTransformConfigUpdate createTestInstance() { - return randomDataFrameTransformConfigUpdate(); + protected TransformConfigUpdate createTestInstance() { + return randomTransformConfigUpdate(); } @Override protected NamedXContentRegistry xContentRegistry() { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); List namedXContents = searchModule.getNamedXContents(); - namedXContents.addAll(new DataFrameNamedXContentProvider().getNamedXContentParsers()); + namedXContents.addAll(new TransformNamedXContentProvider().getNamedXContentParsers()); return new NamedXContentRegistry(namedXContents); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPositionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerPositionTests.java similarity index 85% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPositionTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerPositionTests.java index e4d1c505532b..911966bbfc2c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerPositionTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerPositionTests.java @@ -28,24 +28,24 @@ import java.util.Map; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameIndexerPositionTests extends ESTestCase { +public class TransformIndexerPositionTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameIndexerPositionTests::randomDataFrameIndexerPosition, - DataFrameIndexerPositionTests::toXContent, - DataFrameIndexerPosition::fromXContent) + TransformIndexerPositionTests::randomTransformIndexerPosition, + TransformIndexerPositionTests::toXContent, + TransformIndexerPosition::fromXContent) .supportsUnknownFields(true) .randomFieldsExcludeFilter(field -> field.equals("indexer_position") || field.equals("bucket_position")) .test(); } - public static DataFrameIndexerPosition randomDataFrameIndexerPosition() { - return new DataFrameIndexerPosition(randomPositionMap(), randomPositionMap()); + public static TransformIndexerPosition randomTransformIndexerPosition() { + return new TransformIndexerPosition(randomPositionMap(), randomPositionMap()); } - public static void toXContent(DataFrameIndexerPosition position, XContentBuilder builder) throws IOException { + public static void toXContent(TransformIndexerPosition position, XContentBuilder builder) throws IOException { builder.startObject(); if (position.getIndexerPosition() != null) { builder.field("indexer_position", position.getIndexerPosition()); diff 
--git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java similarity index 76% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java index f6174815aa4c..018cab89b0fc 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameIndexerTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformIndexerStatsTests.java @@ -27,20 +27,20 @@ import java.io.IOException; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameIndexerTransformStatsTests extends ESTestCase { +public class TransformIndexerStatsTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester( this::createParser, - DataFrameIndexerTransformStatsTests::randomStats, - DataFrameIndexerTransformStatsTests::toXContent, - DataFrameIndexerTransformStats::fromXContent) + TransformIndexerStatsTests::randomStats, + TransformIndexerStatsTests::toXContent, + TransformIndexerStats::fromXContent) .supportsUnknownFields(true) .test(); } - public static DataFrameIndexerTransformStats randomStats() { - return new DataFrameIndexerTransformStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + public static TransformIndexerStats randomStats() { + return new TransformIndexerStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomBoolean() ? null : randomDouble(), @@ -48,7 +48,7 @@ public class DataFrameIndexerTransformStatsTests extends ESTestCase { randomBoolean() ? 
null : randomDouble()); } - public static void toXContent(DataFrameIndexerTransformStats stats, XContentBuilder builder) throws IOException { + public static void toXContent(TransformIndexerStats stats, XContentBuilder builder) throws IOException { builder.startObject(); builder.field(IndexerJobStats.NUM_PAGES.getPreferredName(), stats.getNumPages()); builder.field(IndexerJobStats.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments()); @@ -60,11 +60,11 @@ public class DataFrameIndexerTransformStatsTests extends ESTestCase { builder.field(IndexerJobStats.SEARCH_TIME_IN_MS.getPreferredName(), stats.getSearchTime()); builder.field(IndexerJobStats.SEARCH_TOTAL.getPreferredName(), stats.getSearchTotal()); builder.field(IndexerJobStats.SEARCH_FAILURES.getPreferredName(), stats.getSearchFailures()); - builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(), + builder.field(TransformIndexerStats.EXPONENTIAL_AVG_CHECKPOINT_DURATION_MS.getPreferredName(), stats.getExpAvgCheckpointDurationMs()); - builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(), + builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_INDEXED.getPreferredName(), stats.getExpAvgDocumentsIndexed()); - builder.field(DataFrameIndexerTransformStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(), + builder.field(TransformIndexerStats.EXPONENTIAL_AVG_DOCUMENTS_PROCESSED.getPreferredName(), stats.getExpAvgDocumentsProcessed()); builder.endObject(); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgressTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformProgressTests.java similarity index 62% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgressTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformProgressTests.java index faf6805ac275..f7b46b892986 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformProgressTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformProgressTests.java @@ -26,19 +26,19 @@ import java.io.IOException; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameTransformProgressTests extends ESTestCase { +public class TransformProgressTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameTransformProgressTests::randomInstance, - DataFrameTransformProgressTests::toXContent, - DataFrameTransformProgress::fromXContent) + TransformProgressTests::randomInstance, + TransformProgressTests::toXContent, + TransformProgress::fromXContent) .supportsUnknownFields(true) .test(); } - public static DataFrameTransformProgress randomInstance() { - return new DataFrameTransformProgress( + public static TransformProgress randomInstance() { + return new TransformProgress( randomBoolean() ? null : randomNonNegativeLong(), randomBoolean() ? null : randomNonNegativeLong(), randomBoolean() ? null : randomDouble(), @@ -46,19 +46,19 @@ public class DataFrameTransformProgressTests extends ESTestCase { randomBoolean() ? 
null : randomNonNegativeLong()); } - public static void toXContent(DataFrameTransformProgress progress, XContentBuilder builder) throws IOException { + public static void toXContent(TransformProgress progress, XContentBuilder builder) throws IOException { builder.startObject(); if (progress.getTotalDocs() != null) { - builder.field(DataFrameTransformProgress.TOTAL_DOCS.getPreferredName(), progress.getTotalDocs()); + builder.field(TransformProgress.TOTAL_DOCS.getPreferredName(), progress.getTotalDocs()); } if (progress.getPercentComplete() != null) { - builder.field(DataFrameTransformProgress.PERCENT_COMPLETE.getPreferredName(), progress.getPercentComplete()); + builder.field(TransformProgress.PERCENT_COMPLETE.getPreferredName(), progress.getPercentComplete()); } if (progress.getRemainingDocs() != null) { - builder.field(DataFrameTransformProgress.DOCS_REMAINING.getPreferredName(), progress.getRemainingDocs()); + builder.field(TransformProgress.DOCS_REMAINING.getPreferredName(), progress.getRemainingDocs()); } - builder.field(DataFrameTransformProgress.DOCS_INDEXED.getPreferredName(), progress.getDocumentsIndexed()); - builder.field(DataFrameTransformProgress.DOCS_PROCESSED.getPreferredName(), progress.getDocumentsProcessed()); + builder.field(TransformProgress.DOCS_INDEXED.getPreferredName(), progress.getDocumentsIndexed()); + builder.field(TransformProgress.DOCS_PROCESSED.getPreferredName(), progress.getDocumentsProcessed()); builder.endObject(); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformStatsTests.java similarity index 57% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformStatsTests.java index 4b3658f6ea16..873d4aef1d36 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/DataFrameTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformStatsTests.java @@ -27,46 +27,46 @@ import java.io.IOException; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; -public class DataFrameTransformStatsTests extends ESTestCase { +public class TransformStatsTests extends ESTestCase { public void testFromXContent() throws IOException { xContentTester(this::createParser, - DataFrameTransformStatsTests::randomInstance, - DataFrameTransformStatsTests::toXContent, - DataFrameTransformStats::fromXContent) + TransformStatsTests::randomInstance, + TransformStatsTests::toXContent, + TransformStats::fromXContent) .supportsUnknownFields(true) .randomFieldsExcludeFilter(field -> field.equals("node.attributes") || field.contains("position")) .test(); } - public static DataFrameTransformStats randomInstance() { - return new DataFrameTransformStats(randomAlphaOfLength(10), - randomBoolean() ? null : randomFrom(DataFrameTransformStats.State.values()), + public static TransformStats randomInstance() { + return new TransformStats(randomAlphaOfLength(10), + randomBoolean() ? null : randomFrom(TransformStats.State.values()), randomBoolean() ? null : randomAlphaOfLength(100), randomBoolean() ? null : NodeAttributesTests.createRandom(), - DataFrameIndexerTransformStatsTests.randomStats(), - randomBoolean() ? 
null : DataFrameTransformCheckpointingInfoTests.randomDataFrameTransformCheckpointingInfo()); + TransformIndexerStatsTests.randomStats(), + randomBoolean() ? null : TransformCheckpointingInfoTests.randomTransformCheckpointingInfo()); } - public static void toXContent(DataFrameTransformStats stats, XContentBuilder builder) throws IOException { + public static void toXContent(TransformStats stats, XContentBuilder builder) throws IOException { builder.startObject(); - builder.field(DataFrameTransformStats.ID.getPreferredName(), stats.getId()); + builder.field(TransformStats.ID.getPreferredName(), stats.getId()); if (stats.getState() != null) { - builder.field(DataFrameTransformStats.STATE_FIELD.getPreferredName(), + builder.field(TransformStats.STATE_FIELD.getPreferredName(), stats.getState().value()); } if (stats.getReason() != null) { - builder.field(DataFrameTransformStats.REASON_FIELD.getPreferredName(), stats.getReason()); + builder.field(TransformStats.REASON_FIELD.getPreferredName(), stats.getReason()); } if (stats.getNode() != null) { - builder.field(DataFrameTransformStats.NODE_FIELD.getPreferredName()); + builder.field(TransformStats.NODE_FIELD.getPreferredName()); stats.getNode().toXContent(builder, ToXContent.EMPTY_PARAMS); } - builder.field(DataFrameTransformStats.STATS_FIELD.getPreferredName()); - DataFrameIndexerTransformStatsTests.toXContent(stats.getIndexerStats(), builder); + builder.field(TransformStats.STATS_FIELD.getPreferredName()); + TransformIndexerStatsTests.toXContent(stats.getIndexerStats(), builder); if (stats.getCheckpointingInfo() != null) { - builder.field(DataFrameTransformStats.CHECKPOINTING_INFO_FIELD.getPreferredName()); - DataFrameTransformCheckpointingInfoTests.toXContent(stats.getCheckpointingInfo(), builder); + builder.field(TransformStats.CHECKPOINTING_INFO_FIELD.getPreferredName()); + TransformCheckpointingInfoTests.toXContent(stats.getCheckpointingInfo(), builder); } builder.endObject(); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointStatsTests.java similarity index 75% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointStatsTests.java index 6299431893c1..e9ca2b019fd1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointStatsTests.java @@ -28,40 +28,40 @@ import java.io.IOException; import static org.hamcrest.Matchers.equalTo; -public class DataFrameTransformCheckpointStatsTests extends AbstractResponseTestCase< +public class TransformCheckpointStatsTests extends AbstractResponseTestCase< TransformCheckpointStats, - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats> { + org.elasticsearch.client.transform.transforms.TransformCheckpointStats> { public static TransformCheckpointStats fromHlrc( - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats instance) { + org.elasticsearch.client.transform.transforms.TransformCheckpointStats instance) { return new 
TransformCheckpointStats(instance.getCheckpoint(), - DataFrameIndexerPositionTests.fromHlrc(instance.getPosition()), - DataFrameTransformProgressTests.fromHlrc(instance.getCheckpointProgress()), + TransformIndexerPositionTests.fromHlrc(instance.getPosition()), + TransformProgressTests.fromHlrc(instance.getCheckpointProgress()), instance.getTimestampMillis(), instance.getTimeUpperBoundMillis()); } - public static TransformCheckpointStats randomDataFrameTransformCheckpointStats() { + public static TransformCheckpointStats randomTransformCheckpointStats() { return new TransformCheckpointStats(randomLongBetween(1, 1_000_000), - DataFrameIndexerPositionTests.randomDataFrameIndexerPosition(), - randomBoolean() ? null : DataFrameTransformProgressTests.randomDataFrameTransformProgress(), + TransformIndexerPositionTests.randomTransformIndexerPosition(), + randomBoolean() ? null : TransformProgressTests.randomTransformProgress(), randomLongBetween(1, 1_000_000), randomLongBetween(0, 1_000_000)); } @Override protected TransformCheckpointStats createServerTestInstance(XContentType xContentType) { - return randomDataFrameTransformCheckpointStats(); + return randomTransformCheckpointStats(); } @Override - protected org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats doParseToClientInstance(XContentParser parser) + protected org.elasticsearch.client.transform.transforms.TransformCheckpointStats doParseToClientInstance(XContentParser parser) throws IOException { - return org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats.fromXContent(parser); + return org.elasticsearch.client.transform.transforms.TransformCheckpointStats.fromXContent(parser); } @Override protected void assertInstances(TransformCheckpointStats serverTestInstance, - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointStats clientInstance) { + org.elasticsearch.client.transform.transforms.TransformCheckpointStats clientInstance) { assertThat(serverTestInstance.getCheckpoint(), equalTo(clientInstance.getCheckpoint())); assertThat(serverTestInstance.getPosition().getBucketsPosition(), equalTo(clientInstance.getPosition().getBucketsPosition())); assertThat(serverTestInstance.getPosition().getIndexerPosition(), equalTo(clientInstance.getPosition().getIndexerPosition())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointingInfoTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointingInfoTests.java similarity index 65% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointingInfoTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointingInfoTests.java index 45db79b8256d..663d32ff3c4f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformCheckpointingInfoTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformCheckpointingInfoTests.java @@ -27,41 +27,41 @@ import java.io.IOException; import java.time.Instant; import java.util.function.Predicate; -public class DataFrameTransformCheckpointingInfoTests extends AbstractHlrcXContentTestCase< +public class TransformCheckpointingInfoTests extends AbstractHlrcXContentTestCase< TransformCheckpointingInfo, - 
org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo> { + org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo> { public static TransformCheckpointingInfo fromHlrc( - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo instance) { + org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo instance) { return new TransformCheckpointingInfo( - DataFrameTransformCheckpointStatsTests.fromHlrc(instance.getLast()), - DataFrameTransformCheckpointStatsTests.fromHlrc(instance.getNext()), + TransformCheckpointStatsTests.fromHlrc(instance.getLast()), + TransformCheckpointStatsTests.fromHlrc(instance.getNext()), instance.getOperationsBehind(), instance.getChangesLastDetectedAt()); } @Override - public org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo doHlrcParseInstance(XContentParser parser) { - return org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo.fromXContent(parser); + public org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo doHlrcParseInstance(XContentParser parser) { + return org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo.fromXContent(parser); } @Override public TransformCheckpointingInfo convertHlrcToInternal( - org.elasticsearch.client.transform.transforms.DataFrameTransformCheckpointingInfo instance) { + org.elasticsearch.client.transform.transforms.TransformCheckpointingInfo instance) { return fromHlrc(instance); } - public static TransformCheckpointingInfo randomDataFrameTransformCheckpointingInfo() { + public static TransformCheckpointingInfo randomTransformCheckpointingInfo() { return new TransformCheckpointingInfo( - DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(), - DataFrameTransformCheckpointStatsTests.randomDataFrameTransformCheckpointStats(), + TransformCheckpointStatsTests.randomTransformCheckpointStats(), + TransformCheckpointStatsTests.randomTransformCheckpointStats(), randomNonNegativeLong(), randomBoolean() ? 
null : Instant.ofEpochMilli(randomNonNegativeLong())); } @Override protected TransformCheckpointingInfo createTestInstance() { - return randomDataFrameTransformCheckpointingInfo(); + return randomTransformCheckpointingInfo(); } @Override diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerPositionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerPositionTests.java similarity index 81% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerPositionTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerPositionTests.java index a26b94482aef..6ec176294c77 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerPositionTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerPositionTests.java @@ -29,35 +29,35 @@ import java.util.Map; import static org.hamcrest.Matchers.equalTo; -public class DataFrameIndexerPositionTests extends AbstractResponseTestCase< +public class TransformIndexerPositionTests extends AbstractResponseTestCase< TransformIndexerPosition, - org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition> { + org.elasticsearch.client.transform.transforms.TransformIndexerPosition> { public static TransformIndexerPosition fromHlrc( - org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition instance) { + org.elasticsearch.client.transform.transforms.TransformIndexerPosition instance) { if (instance == null) { return null; } return new TransformIndexerPosition(instance.getIndexerPosition(), instance.getBucketsPosition()); } - public static TransformIndexerPosition randomDataFrameIndexerPosition() { + public static TransformIndexerPosition randomTransformIndexerPosition() { return new TransformIndexerPosition(randomPositionMap(), randomPositionMap()); } @Override protected TransformIndexerPosition createServerTestInstance(XContentType xContentType) { - return randomDataFrameIndexerPosition(); + return randomTransformIndexerPosition(); } @Override - protected org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition doParseToClientInstance(XContentParser parser) { - return org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition.fromXContent(parser); + protected org.elasticsearch.client.transform.transforms.TransformIndexerPosition doParseToClientInstance(XContentParser parser) { + return org.elasticsearch.client.transform.transforms.TransformIndexerPosition.fromXContent(parser); } @Override protected void assertInstances(TransformIndexerPosition serverTestInstance, - org.elasticsearch.client.transform.transforms.DataFrameIndexerPosition clientInstance) { + org.elasticsearch.client.transform.transforms.TransformIndexerPosition clientInstance) { assertThat(serverTestInstance.getIndexerPosition(), equalTo(clientInstance.getIndexerPosition())); assertThat(serverTestInstance.getBucketsPosition(), equalTo(clientInstance.getBucketsPosition())); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java similarity index 82% rename from 
client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerTransformStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java index 0c696c7368b1..e4e22bf3611e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameIndexerTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformIndexerStatsTests.java @@ -25,12 +25,12 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import java.io.IOException; -public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTestCase< +public class TransformIndexerStatsTests extends AbstractHlrcXContentTestCase< TransformIndexerStats, - org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats> { + org.elasticsearch.client.transform.transforms.TransformIndexerStats> { public static TransformIndexerStats fromHlrc( - org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats instance) { + org.elasticsearch.client.transform.transforms.TransformIndexerStats instance) { return new TransformIndexerStats( instance.getNumPages(), instance.getNumDocuments(), @@ -48,14 +48,14 @@ public class DataFrameIndexerTransformStatsTests extends AbstractHlrcXContentTes } @Override - public org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats doHlrcParseInstance(XContentParser parser) + public org.elasticsearch.client.transform.transforms.TransformIndexerStats doHlrcParseInstance(XContentParser parser) throws IOException { - return org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats.fromXContent(parser); + return org.elasticsearch.client.transform.transforms.TransformIndexerStats.fromXContent(parser); } @Override public TransformIndexerStats convertHlrcToInternal( - org.elasticsearch.client.transform.transforms.DataFrameIndexerTransformStats instance) { + org.elasticsearch.client.transform.transforms.TransformIndexerStats instance) { return fromHlrc(instance); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformProgressTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformProgressTests.java similarity index 79% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformProgressTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformProgressTests.java index ff6e797b7dd7..1d0b5257b7d0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformProgressTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformProgressTests.java @@ -26,12 +26,12 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import static org.hamcrest.Matchers.equalTo; -public class DataFrameTransformProgressTests extends AbstractResponseTestCase< +public class TransformProgressTests extends AbstractResponseTestCase< TransformProgress, - org.elasticsearch.client.transform.transforms.DataFrameTransformProgress> { + org.elasticsearch.client.transform.transforms.TransformProgress> { public static TransformProgress fromHlrc( - 
org.elasticsearch.client.transform.transforms.DataFrameTransformProgress instance) { + org.elasticsearch.client.transform.transforms.TransformProgress instance) { if (instance == null) { return null; } @@ -41,7 +41,7 @@ public class DataFrameTransformProgressTests extends AbstractResponseTestCase< instance.getDocumentsIndexed()); } - public static TransformProgress randomDataFrameTransformProgress() { + public static TransformProgress randomTransformProgress() { Long totalDocs = randomBoolean() ? null : randomNonNegativeLong(); Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null; return new TransformProgress( @@ -53,17 +53,17 @@ public class DataFrameTransformProgressTests extends AbstractResponseTestCase< @Override protected TransformProgress createServerTestInstance(XContentType xContentType) { - return randomDataFrameTransformProgress(); + return randomTransformProgress(); } @Override - protected org.elasticsearch.client.transform.transforms.DataFrameTransformProgress doParseToClientInstance(XContentParser parser) { - return org.elasticsearch.client.transform.transforms.DataFrameTransformProgress.fromXContent(parser); + protected org.elasticsearch.client.transform.transforms.TransformProgress doParseToClientInstance(XContentParser parser) { + return org.elasticsearch.client.transform.transforms.TransformProgress.fromXContent(parser); } @Override protected void assertInstances(TransformProgress serverTestInstance, - org.elasticsearch.client.transform.transforms.DataFrameTransformProgress clientInstance) { + org.elasticsearch.client.transform.transforms.TransformProgress clientInstance) { assertThat(serverTestInstance.getTotalDocs(), equalTo(clientInstance.getTotalDocs())); assertThat(serverTestInstance.getDocumentsProcessed(), equalTo(clientInstance.getDocumentsProcessed())); assertThat(serverTestInstance.getPercentComplete(), equalTo(clientInstance.getPercentComplete())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java similarity index 75% rename from client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformStatsTests.java rename to client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java index f58947be54e9..5c15fb7bed79 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/DataFrameTransformStatsTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/hlrc/TransformStatsTests.java @@ -34,8 +34,8 @@ import java.util.HashMap; import java.util.Map; import java.util.function.Predicate; -public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase { +public class TransformStatsTests extends AbstractHlrcXContentTestCase { public static NodeAttributes fromHlrc(org.elasticsearch.client.transform.transforms.NodeAttributes attributes) { return attributes == null ? null : new NodeAttributes(attributes.getId(), @@ -46,45 +46,45 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase field.contains("position") || field.equals("node.attributes"); } - public static TransformProgress randomDataFrameTransformProgress() { + public static TransformProgress randomTransformProgress() { Long totalDocs = randomBoolean() ? 
null : randomNonNegativeLong(); Long docsRemaining = totalDocs != null ? randomLongBetween(0, totalDocs) : null; return new TransformProgress( @@ -107,16 +107,16 @@ public class DataFrameTransformStatsTests extends AbstractHlrcXContentTestCase integTest.nodes[0].homeDir}/logs/${ -> integTest.nodes[0].clusterName }_server.json" + "${ -> testClusters.integTest.singleNode().getServerLog()}" } else { systemProperty 'tests.logfile', '--external--' } diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/JsonLogsFormatAndParseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/JsonLogsFormatAndParseIT.java index 12c916946085..408f6ee631db 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/JsonLogsFormatAndParseIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/JsonLogsFormatAndParseIT.java @@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.is; public class JsonLogsFormatAndParseIT extends JsonLogsIntegTestCase { @Override protected Matcher nodeNameMatcher() { - return is("node-0"); + return is("integTest-0"); } @Override diff --git a/distribution/build.gradle b/distribution/build.gradle index 93c4012cda2a..f20e5a654260 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -280,8 +280,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { modulesFiles = { oss, platform -> copySpec { eachFile { - if (it.relativePath.segments[-2] == 'bin') { + if (it.relativePath.segments[-2] == 'bin' || (platform == 'darwin' && it.relativePath.segments[-2] == 'MacOS')) { // bin files, wherever they are within modules (eg platform specific) should be executable + // and MacOS is an alternative to bin on macOS it.mode = 0755 } else { it.mode = 0644 @@ -398,6 +399,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } return copySpec { from project.jdks."bundled_${platform}" + exclude "demo/**" eachFile { FileCopyDetails details -> if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') { details.mode = 0755 diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 7bf973e7edc9..e4f0a04d4e9d 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -6,6 +6,8 @@ import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.test.fixtures' +testFixtures.useFixture() + configurations { dockerPlugins dockerSource diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 136803a5d83e..a1ab72890a43 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -407,163 +407,168 @@ subprojects { 'default' buildDist } - // sanity checks if packages can be extracted - final File extractionDir = new File(buildDir, 'extracted') - File packageExtractionDir - if (project.name.contains('deb')) { - packageExtractionDir = new File(extractionDir, 'deb-extracted') - } else { - assert project.name.contains('rpm') - packageExtractionDir = new File(extractionDir, 'rpm-extracted') - } - task checkExtraction(type: LoggedExec) { - dependsOn buildDist - doFirst { - project.delete(extractionDir) - extractionDir.mkdirs() - } - } - check.dependsOn checkExtraction - if (project.name.contains('deb')) { - checkExtraction { - onlyIf dpkgExists - commandLine 
'dpkg-deb', '-x', "${-> buildDist.outputs.files.filter(debFilter).singleFile}", packageExtractionDir - } - } else { - assert project.name.contains('rpm') - checkExtraction { - onlyIf rpmExists - final File rpmDatabase = new File(extractionDir, 'rpm-database') - commandLine 'rpm', - '--badreloc', - '--nodeps', - '--noscripts', - '--notriggers', - '--dbpath', - rpmDatabase, - '--relocate', - "/=${packageExtractionDir}", - '-i', - "${-> buildDist.outputs.files.singleFile}" - } - } + if (dpkgExists() || rpmExists()) { - task checkLicense { - dependsOn buildDist, checkExtraction - } - check.dependsOn checkLicense - if (project.name.contains('deb')) { - checkLicense { - onlyIf dpkgExists - doLast { - Path copyrightPath - String expectedLicense - String licenseFilename - if (project.name.contains('oss-')) { - copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright") - expectedLicense = "ASL-2.0" - licenseFilename = "APACHE-LICENSE-2.0.txt" - } else { - copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright") - expectedLicense = "Elastic-License" - licenseFilename = "ELASTIC-LICENSE.txt" - } - final List header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/", - "Copyright: Elasticsearch B.V. ", - "License: " + expectedLicense) - final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) - final List expectedLines = header + licenseLines.collect { " " + it } - assertLinesInFile(copyrightPath, expectedLines) + // sanity checks if packages can be extracted + final File extractionDir = new File(buildDir, 'extracted') + File packageExtractionDir + if (project.name.contains('deb')) { + packageExtractionDir = new File(extractionDir, 'deb-extracted') + } else { + assert project.name.contains('rpm') + packageExtractionDir = new File(extractionDir, 'rpm-extracted') + } + task checkExtraction(type: LoggedExec) { + dependsOn buildDist + doFirst { + project.delete(extractionDir) + extractionDir.mkdirs() } } - } else { - assert project.name.contains('rpm') - checkLicense { - onlyIf rpmExists - doLast { - String licenseFilename - if (project.name.contains('oss-')) { - licenseFilename = "APACHE-LICENSE-2.0.txt" - } else { - licenseFilename = "ELASTIC-LICENSE.txt" - } - final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) - final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt") - assertLinesInFile(licensePath, licenseLines) + + check.dependsOn checkExtraction + if (project.name.contains('deb')) { + checkExtraction { + onlyIf dpkgExists + commandLine 'dpkg-deb', '-x', "${-> buildDist.outputs.files.filter(debFilter).singleFile}", packageExtractionDir + } + } else { + assert project.name.contains('rpm') + checkExtraction { + onlyIf rpmExists + final File rpmDatabase = new File(extractionDir, 'rpm-database') + commandLine 'rpm', + '--badreloc', + '--nodeps', + '--noscripts', + '--notriggers', + '--dbpath', + rpmDatabase, + '--relocate', + "/=${packageExtractionDir}", + '-i', + "${-> buildDist.outputs.files.singleFile}" } } - } - task checkNotice { - dependsOn buildDist, checkExtraction - onlyIf { (project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it)) } - doLast { - final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch") - final Path noticePath = 
packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt") - assertLinesInFile(noticePath, noticeLines) + task checkLicense { + dependsOn buildDist, checkExtraction } - } - check.dependsOn checkNotice - - task checkLicenseMetadata(type: LoggedExec) { - dependsOn buildDist, checkExtraction - } - check.dependsOn checkLicenseMetadata - if (project.name.contains('deb')) { - checkLicenseMetadata { LoggedExec exec -> - onlyIf dpkgExists - final ByteArrayOutputStream output = new ByteArrayOutputStream() - exec.commandLine 'dpkg-deb', '--info', "${ -> buildDist.outputs.files.filter(debFilter).singleFile}" - exec.standardOutput = output - doLast { - String expectedLicense - if (project.name.contains('oss-')) { - expectedLicense = "ASL-2.0" - } else { - expectedLicense = "Elastic-License" + check.dependsOn checkLicense + if (project.name.contains('deb')) { + checkLicense { + onlyIf dpkgExists + doLast { + Path copyrightPath + String expectedLicense + String licenseFilename + if (project.name.contains('oss-')) { + copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch-oss/copyright") + expectedLicense = "ASL-2.0" + licenseFilename = "APACHE-LICENSE-2.0.txt" + } else { + copyrightPath = packageExtractionDir.toPath().resolve("usr/share/doc/elasticsearch/copyright") + expectedLicense = "Elastic-License" + licenseFilename = "ELASTIC-LICENSE.txt" + } + final List header = Arrays.asList("Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/", + "Copyright: Elasticsearch B.V. ", + "License: " + expectedLicense) + final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) + final List expectedLines = header + licenseLines.collect { " " + it } + assertLinesInFile(copyrightPath, expectedLines) } - final Pattern pattern = Pattern.compile("\\s*License: (.+)") - final String info = output.toString('UTF-8') - final String[] actualLines = info.split("\n") - int count = 0 - for (final String actualLine : actualLines) { - final Matcher matcher = pattern.matcher(actualLine) - if (matcher.matches()) { - count++ - final String actualLicense = matcher.group(1) - if (expectedLicense != actualLicense) { - throw new GradleException("expected license [${expectedLicense} for package info but found [${actualLicense}]") + } + } else { + assert project.name.contains('rpm') + checkLicense { + onlyIf rpmExists + doLast { + String licenseFilename + if (project.name.contains('oss-')) { + licenseFilename = "APACHE-LICENSE-2.0.txt" + } else { + licenseFilename = "ELASTIC-LICENSE.txt" + } + final List licenseLines = Files.readAllLines(rootDir.toPath().resolve("licenses/" + licenseFilename)) + final Path licensePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/LICENSE.txt") + assertLinesInFile(licensePath, licenseLines) + } + } + } + + task checkNotice { + dependsOn buildDist, checkExtraction + onlyIf { + (project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it)) + } + doLast { + final List noticeLines = Arrays.asList("Elasticsearch", "Copyright 2009-2018 Elasticsearch") + final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/elasticsearch/NOTICE.txt") + assertLinesInFile(noticePath, noticeLines) + } + } + check.dependsOn checkNotice + + task checkLicenseMetadata(type: LoggedExec) { + dependsOn buildDist, checkExtraction + } + check.dependsOn checkLicenseMetadata + if (project.name.contains('deb')) { + checkLicenseMetadata { LoggedExec exec 
-> + onlyIf dpkgExists + final ByteArrayOutputStream output = new ByteArrayOutputStream() + exec.commandLine 'dpkg-deb', '--info', "${-> buildDist.outputs.files.filter(debFilter).singleFile}" + exec.standardOutput = output + doLast { + String expectedLicense + if (project.name.contains('oss-')) { + expectedLicense = "ASL-2.0" + } else { + expectedLicense = "Elastic-License" + } + final Pattern pattern = Pattern.compile("\\s*License: (.+)") + final String info = output.toString('UTF-8') + final String[] actualLines = info.split("\n") + int count = 0 + for (final String actualLine : actualLines) { + final Matcher matcher = pattern.matcher(actualLine) + if (matcher.matches()) { + count++ + final String actualLicense = matcher.group(1) + if (expectedLicense != actualLicense) { + throw new GradleException("expected license [${expectedLicense}] for package info but found [${actualLicense}]") } } - } - if (count == 0) { - throw new GradleException("expected license [${expectedLicense}] for package info but found none in:\n${info}") - } - if (count > 1) { - throw new GradleException("expected a single license for package info but found [${count}] in:\n${info}") + if (count == 0) { + throw new GradleException("expected license [${expectedLicense}] for package info but found none in:\n${info}") + } + if (count > 1) { + throw new GradleException("expected a single license for package info but found [${count}] in:\n${info}") + } } } - } - } else { - assert project.name.contains('rpm') - checkLicenseMetadata { LoggedExec exec -> - onlyIf rpmExists - final ByteArrayOutputStream output = new ByteArrayOutputStream() - exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.outputs.files.singleFile}" - exec.standardOutput = output - doLast { - String license = output.toString('UTF-8') - String expectedLicense - if (project.name.contains('oss-')) { - expectedLicense = "ASL 2.0" - } else { - expectedLicense = "Elastic License" - } - if (license != expectedLicense) { - throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.outputs.files.singleFile}] but was [${license}]") + } else { + assert project.name.contains('rpm') + checkLicenseMetadata { LoggedExec exec -> + onlyIf rpmExists + final ByteArrayOutputStream output = new ByteArrayOutputStream() + exec.commandLine 'rpm', '-qp', '--queryformat', '%{License}', "${-> buildDist.outputs.files.singleFile}" + exec.standardOutput = output + doLast { + String license = output.toString('UTF-8') + String expectedLicense + if (project.name.contains('oss-')) { + expectedLicense = "ASL 2.0" + } else { + expectedLicense = "Elastic License" + } + if (license != expectedLicense) { + throw new GradleException("expected license [${expectedLicense}] for [${-> buildDist.outputs.files.singleFile}] but was [${license}]") + } } } } } - } diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index fa3de68cfb87..1c329d39e146 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -41,7 +41,8 @@ # -XX:-UseConcMarkSweepGC # -XX:-UseCMSInitiatingOccupancyOnly # -XX:+UseG1GC -# -XX:InitiatingHeapOccupancyPercent=75 +# -XX:G1ReservePercent=25 +# -XX:InitiatingHeapOccupancyPercent=30 ## DNS cache policy # cache ttl in seconds for positive DNS lookups noting that this overrides the diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index d90e5972c9f5..f404f941168d 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -23,6 +23,7 @@ import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.apache.lucene.search.spell.LevenshteinDistance; import org.apache.lucene.util.CollectionUtil; +import org.apache.lucene.util.Constants; import org.bouncycastle.bcpg.ArmoredInputStream; import org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider; import org.bouncycastle.openpgp.PGPException; @@ -836,7 +837,10 @@ class InstallPluginCommand extends EnvironmentAwareCommand { Files.walkFileTree(destination, new SimpleFileVisitor() { @Override public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException { - if ("bin".equals(file.getParent().getFileName().toString())) { + final String parentDirName = file.getParent().getFileName().toString(); + if ("bin".equals(parentDirName) + // "MacOS" is an alternative to "bin" on macOS + || (Constants.MAC_OS_X && "MacOS".equals(parentDirName))) { setFileAttributes(file, BIN_FILES_PERMS); } else { setFileAttributes(file, PLUGIN_FILES_PERMS); diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 10da73805d22..35fb7bda1f4d 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -580,7 +580,7 @@ include::ilm/remove_lifecycle_policy_from_index.asciidoc[] == {transform-cap} APIs :upid: {mainid} -:doc-tests-file: {doc-tests}/DataFrameTransformDocumentationIT.java +:doc-tests-file: {doc-tests}/TransformDocumentationIT.java The Java High Level REST Client supports the following {transform} APIs: diff --git a/docs/java-rest/high-level/transform/delete_transform.asciidoc b/docs/java-rest/high-level/transform/delete_transform.asciidoc index 31a86dd80b4e..8416ce40e371 100644 --- a/docs/java-rest/high-level/transform/delete_transform.asciidoc +++ b/docs/java-rest/high-level/transform/delete_transform.asciidoc @@ -1,6 +1,6 @@ -- :api: delete-transform -:request: DeleteDataFrameTransformRequest +:request: DeleteTransformRequest :response: AcknowledgedResponse -- [role="xpack"] diff --git a/docs/java-rest/high-level/transform/get_transform.asciidoc b/docs/java-rest/high-level/transform/get_transform.asciidoc index f9c8c4a09809..64aa0f229c42 100644 --- a/docs/java-rest/high-level/transform/get_transform.asciidoc +++ b/docs/java-rest/high-level/transform/get_transform.asciidoc @@ -1,7 +1,7 @@ -- :api: get-transform -:request: GetDataFrameTransformRequest -:response: GetDataFrameTransformResponse +:request: GetTransformRequest +:response: GetTransformResponse -- [role="xpack"] [id="{upid}-{api}"] @@ -46,4 +46,4 @@ The returned +{response}+ contains the requested {transforms}. 
["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-response] --------------------------------------------------- \ No newline at end of file +-------------------------------------------------- diff --git a/docs/java-rest/high-level/transform/get_transform_stats.asciidoc b/docs/java-rest/high-level/transform/get_transform_stats.asciidoc index 7674dab7b0a1..cd2fcf2237cc 100644 --- a/docs/java-rest/high-level/transform/get_transform_stats.asciidoc +++ b/docs/java-rest/high-level/transform/get_transform_stats.asciidoc @@ -1,7 +1,7 @@ -- :api: get-transform-stats -:request: GetDataFrameTransformStatsRequest -:response: GetDataFrameTransformStatsResponse +:request: GetTransformStatsRequest +:response: GetTransformStatsResponse -- [role="xpack"] [id="{upid}-{api}"] @@ -48,7 +48,7 @@ The returned +{response}+ contains the requested {transform} statistics. -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-response] -------------------------------------------------- -<1> The response contains a list of `DataFrameTransformStats` objects +<1> The response contains a list of `TransformStats` objects <2> The running state of the {transform}, for example `started`, `indexing`, etc. <3> The overall {transform} statistics recording the number of documents indexed etc. <4> The progress of the current run in the {transform}. Supplies the number of docs left until the next checkpoint diff --git a/docs/java-rest/high-level/transform/preview_transform.asciidoc b/docs/java-rest/high-level/transform/preview_transform.asciidoc index 5de00f5891f2..377aba597a67 100644 --- a/docs/java-rest/high-level/transform/preview_transform.asciidoc +++ b/docs/java-rest/high-level/transform/preview_transform.asciidoc @@ -1,7 +1,7 @@ -- :api: preview-transform -:request: PreviewDataFrameTransformRequest -:response: PreviewDataFrameTransformResponse +:request: PreviewTransformRequest +:response: PreviewTransformResponse -- [role="xpack"] [id="{upid}-{api}"] diff --git a/docs/java-rest/high-level/transform/put_transform.asciidoc b/docs/java-rest/high-level/transform/put_transform.asciidoc index b84ba1329cda..53b0445564a6 100644 --- a/docs/java-rest/high-level/transform/put_transform.asciidoc +++ b/docs/java-rest/high-level/transform/put_transform.asciidoc @@ -1,6 +1,6 @@ -- :api: put-transform -:request: PutDataFrameTransformRequest +:request: PutTransformRequest :response: AcknowledgedResponse -- [role="xpack"] @@ -29,7 +29,7 @@ such an error will not be visible until `_start` is called. 
[id="{upid}-{api}-config"] ==== {transform-cap} configuration -The `DataFrameTransformConfig` object contains all the details about the +The `TransformConfig` object contains all the details about the {transform} configuration and contains the following arguments: ["source","java",subs="attributes,callouts,macros"] diff --git a/docs/java-rest/high-level/transform/start_transform.asciidoc b/docs/java-rest/high-level/transform/start_transform.asciidoc index 69aea67dc2b8..9de2a0da23d2 100644 --- a/docs/java-rest/high-level/transform/start_transform.asciidoc +++ b/docs/java-rest/high-level/transform/start_transform.asciidoc @@ -1,7 +1,7 @@ -- :api: start-transform -:request: StartDataFrameTransformRequest -:response: StartDataFrameTransformResponse +:request: StartTransformRequest +:response: StartTransformResponse -- [role="xpack"] [id="{upid}-{api}"] @@ -37,4 +37,4 @@ include::../execution.asciidoc[] ==== Response The returned +{response}+ object acknowledges the {transform} has -started. \ No newline at end of file +started. diff --git a/docs/java-rest/high-level/transform/stop_transform.asciidoc b/docs/java-rest/high-level/transform/stop_transform.asciidoc index 36a1491615ec..660594de39f3 100644 --- a/docs/java-rest/high-level/transform/stop_transform.asciidoc +++ b/docs/java-rest/high-level/transform/stop_transform.asciidoc @@ -1,7 +1,7 @@ -- :api: stop-transform -:request: StopDataFrameTransformRequest -:response: StopDataFrameTransformResponse +:request: StopTransformRequest +:response: StopTransformResponse -- [role="xpack"] [id="{upid}-{api}"] @@ -31,7 +31,7 @@ The following arguments are optional. -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-request-options] -------------------------------------------------- -<1> If true wait for the data frame task to stop before responding +<1> If true wait for the transform task to stop before responding <2> Controls the amount of time to wait until the {transform} stops. <3> Whether to ignore if a wildcard expression matches no {transforms}. @@ -39,4 +39,4 @@ include::../execution.asciidoc[] ==== Response -The returned +{response}+ object acknowledges the {transform} has stopped. \ No newline at end of file +The returned +{response}+ object acknowledges the {transform} has stopped. diff --git a/docs/java-rest/high-level/transform/update_transform.asciidoc b/docs/java-rest/high-level/transform/update_transform.asciidoc index 37f60eacf470..ffde48ae1864 100644 --- a/docs/java-rest/high-level/transform/update_transform.asciidoc +++ b/docs/java-rest/high-level/transform/update_transform.asciidoc @@ -1,7 +1,7 @@ -- :api: update-transform -:request: UpdateDataFrameTransformRequest -:response: UpdateDataFrameTransformResponse +:request: UpdateTransformRequest +:response: UpdateTransformResponse -- [role="xpack"] [id="{upid}-{api}"] @@ -30,7 +30,7 @@ such an error will not be visible until `_start` is called. 
[id="{upid}-{api}-config"] ==== {transform-cap} update configuration -The `DataFrameTransformConfigUpdate` object contains all the details about updated +The `TransformConfigUpdate` object contains all the details about updated {transform} configuration and contains the following arguments: ["source","java",subs="attributes,callouts,macros"] diff --git a/docs/plugins/mapper-annotated-text.asciidoc b/docs/plugins/mapper-annotated-text.asciidoc index e6d1628b717b..8cac0aec7080 100644 --- a/docs/plugins/mapper-annotated-text.asciidoc +++ b/docs/plugins/mapper-annotated-text.asciidoc @@ -114,12 +114,12 @@ in this example where a search for `Beck` will not match `Jeff Beck` : # Example documents PUT my_index/_doc/1 { - "my_field": "[Beck](Beck) announced a new tour"<2> + "my_field": "[Beck](Beck) announced a new tour"<1> } PUT my_index/_doc/2 { - "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<1> + "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<2> } # Example search diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc index 1f43a038e7c1..e3cc9a8ce4ef 100644 --- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc @@ -89,7 +89,6 @@ Possible response: "hits": [ { "_index": "sales", - "_type": "_doc", "_id": "AVnNBmauCQpcRyxw6ChK", "_source": { "date": "2015/03/01 00:00:00", @@ -117,7 +116,6 @@ Possible response: "hits": [ { "_index": "sales", - "_type": "_doc", "_id": "AVnNBmauCQpcRyxw6ChL", "_source": { "date": "2015/03/01 00:00:00", @@ -145,7 +143,6 @@ Possible response: "hits": [ { "_index": "sales", - "_type": "_doc", "_id": "AVnNBmatCQpcRyxw6ChH", "_source": { "date": "2015/01/01 00:00:00", @@ -331,7 +328,6 @@ Top hits response snippet with a nested hit, which resides in the first slot of "hits": [ { "_index": "sales", - "_type" : "_doc", "_id": "1", "_nested": { "field": "comments", <1> @@ -385,7 +381,6 @@ the second slow of the `nested_child_field` field: "hits": [ { "_index": "a", - "_type": "b", "_id": "1", "_score": 1, "_nested" : { diff --git a/docs/reference/aggregations/misc.asciidoc b/docs/reference/aggregations/misc.asciidoc index 99a19247acfc..6bb30daf3258 100644 --- a/docs/reference/aggregations/misc.asciidoc +++ b/docs/reference/aggregations/misc.asciidoc @@ -138,7 +138,6 @@ In the response, the aggregations names will be changed to respectively `date_hi "hits" : [ { "_index": "twitter", - "_type": "_doc", "_id": "0", "_score": 1.0, "_source": { diff --git a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc index d6f4670fb404..2a3cf5f999e5 100644 --- a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc @@ -243,7 +243,6 @@ The output from the above is: "hits": [ { "_index": "my_index", - "_type": "_doc", "_id": "1", "_score": 0.2876821, "_source": { diff --git a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc index bf414e509a66..8d737a599595 100644 --- a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc @@ -301,7 +301,6 @@ GET my_index/_search "hits": [ { "_index": "my_index", - "_type": "_doc", "_id": "1", "_score": 
0.5753642, "_source": { diff --git a/docs/reference/ccr/overview.asciidoc b/docs/reference/ccr/overview.asciidoc index 8f25cb1da473..16f7c7d48df1 100644 --- a/docs/reference/ccr/overview.asciidoc +++ b/docs/reference/ccr/overview.asciidoc @@ -17,6 +17,7 @@ Replication is pull-based. This means that replication is driven by the follower index. This simplifies state management on the leader index and means that {ccr} does not interfere with indexing on the leader index. +IMPORTANT: {ccr-cap} requires <>. ==== Configuring replication @@ -213,4 +214,4 @@ following process starts again. You can terminate replication with the {ref}/ccr-post-unfollow.html[unfollow API]. This API converts a follower index -to a regular (non-follower) index. \ No newline at end of file +to a regular (non-follower) index. diff --git a/docs/reference/data-rollup-transform.asciidoc b/docs/reference/data-rollup-transform.asciidoc new file mode 100644 index 000000000000..413b7d89d825 --- /dev/null +++ b/docs/reference/data-rollup-transform.asciidoc @@ -0,0 +1,20 @@ +[[data-rollup-transform]] += Roll up or transform your data + +[partintro] +-- + +{es} offers the following methods for manipulating your data: + +* <> ++ +include::rollup/index.asciidoc[tag=rollup-intro] +* <> ++ +include::transform/index.asciidoc[tag=transform-intro] + +-- + +include::rollup/index.asciidoc[] + +include::transform/index.asciidoc[] diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc index dd870cef0b19..71d00528e80c 100644 --- a/docs/reference/docs/delete-by-query.asciidoc +++ b/docs/reference/docs/delete-by-query.asciidoc @@ -19,11 +19,6 @@ POST /twitter/_delete_by_query -------------------------------------------------- // TEST[setup:big_twitter] -[[docs-delete-by-query-api-request]] -==== {api-request-title} - -`POST //_delete_by_query` - //// [source,console-result] @@ -49,6 +44,11 @@ POST /twitter/_delete_by_query // TESTRESPONSE[s/"took" : 147/"took" : "$body.took"/] //// +[[docs-delete-by-query-api-request]] +==== {api-request-title} + +`POST //_delete_by_query` + [[docs-delete-by-query-api-desc]] ==== {api-description-title} @@ -89,8 +89,7 @@ request to be refreshed. Unlike the delete API, it does not support If the request contains `wait_for_completion=false`, {es} performs some preflight checks, launches the request, and returns a -<> -you can use to cancel or get the status of the task. {es} creates a +<> you can use to cancel or get the status of the task. {es} creates a record of this task as a document at `.tasks/task/${taskId}`. When you are done with a task, you should delete the task document so {es} can reclaim the space. @@ -225,8 +224,6 @@ include::{docdir}/rest-api/common-parms.asciidoc[tag=timeout] include::{docdir}/rest-api/common-parms.asciidoc[tag=version] -include::{docdir}/rest-api/common-parms.asciidoc[tag=timeout] - include::{docdir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] [[docs-delete-by-query-api-request-body]] @@ -237,7 +234,7 @@ include::{docdir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] using the <>. -[[docs-delete-by-quer-api-response-body]] +[[docs-delete-by-query-api-response-body]] ==== Response body ////////////////////////// @@ -328,7 +325,7 @@ The number of requests per second effectively executed during the delete by quer `throttled_until_millis`:: This field should always be equal to zero in a `_delete_by_query` response. 
It only -has meaning when using the <>, where it +has meaning when using the <>, where it indicates the next time (in milliseconds since epoch) a throttled request will be executed again in order to conform to `requests_per_second`. @@ -539,7 +536,7 @@ Adding `slices` to `_delete_by_query` just automates the manual process used in the section above, creating sub-requests which means it has some quirks: * You can see these requests in the -<>. These sub-requests are "child" +<>. These sub-requests are "child" tasks of the task for the request with `slices`. * Fetching the status of the task for the request with `slices` only contains the status of completed slices. @@ -653,7 +650,7 @@ you to delete that document. [float] [[docs-delete-by-query-cancel-task-api]] -==== Cancel a delete by query operation +===== Cancel a delete by query operation Any delete by query can be canceled using the <>: diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index d85f76a6765a..aa49bda06fdd 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -1,10 +1,12 @@ [[docs-update-by-query]] === Update By Query API +++++ +Update by query +++++ -The simplest usage of `_update_by_query` just performs an update on every -document in the index without changing the source. This is useful to -<> or some other online -mapping change. Here is the API: +Updates documents that match the specified query. +If no query is specified, performs an update on every document in the index without +modifying the source, which is useful for picking up mapping changes. [source,console] -------------------------------------------------- @@ -12,7 +14,7 @@ POST twitter/_update_by_query?conflicts=proceed -------------------------------------------------- // TEST[setup:big_twitter] -That will return something like this: +//// [source,console-result] -------------------------------------------------- @@ -37,42 +39,260 @@ That will return something like this: -------------------------------------------------- // TESTRESPONSE[s/"took" : 147/"took" : "$body.took"/] -`_update_by_query` gets a snapshot of the index when it starts and indexes what -it finds using `internal` versioning. That means you'll get a version -conflict if the document changes between the time when the snapshot was taken -and when the index request is processed. When the versions match, the document -is updated and the version number is incremented. +//// -NOTE: Since `internal` versioning does not support the value 0 as a valid -version number, documents with version equal to zero cannot be updated using -`_update_by_query` and will fail the request. +[[docs-update-by-query-api-request]] +==== {api-request-title} -All update and query failures cause the `_update_by_query` to abort and are -returned in the `failures` of the response. The updates that have been -performed still stick. In other words, the process is not rolled back, only -aborted. While the first failure causes the abort, all failures that are -returned by the failing bulk request are returned in the `failures` element; therefore -it's possible for there to be quite a few failed entities. +`POST //_update_by_query` -If you want to simply count version conflicts, and not cause the `_update_by_query` -to abort, you can set `conflicts=proceed` on the url or `"conflicts": "proceed"` -in the request body. 
The first example does this because it is just trying to
-pick up an online mapping change, and a version conflict simply means that the
-conflicting document was updated between the start of the `_update_by_query`
-and the time when it attempted to update the document. This is fine because
-that update will have picked up the online mapping update.
+[[docs-update-by-query-api-desc]]
+==== {api-description-title}

-Back to the API format, this will update tweets from the `twitter` index:
+You can specify the query criteria in the request URI or the request body
+using the same syntax as the <>.

-[source,console]
+When you submit an update by query request, {es} gets a snapshot of the index
+when it begins processing the request and updates matching documents using
+`internal` versioning.
+When the versions match, the document is updated and the version number is incremented.
+If a document changes between the time that the snapshot is taken and
+the update operation is processed, it results in a version conflict and the operation fails.
+You can opt to count version conflicts instead of halting and returning by
+setting `conflicts` to `proceed`.
+
+NOTE: Documents with a version equal to 0 cannot be updated using update by
+query because `internal` versioning does not support 0 as a valid
+version number.
+
+While processing an update by query request, {es} performs multiple search
+requests sequentially to find all of the matching documents.
+A bulk update request is performed for each batch of matching documents.
+Any query or update failures cause the update by query request to fail and
+the failures are shown in the response.
+Any update requests that completed successfully still stick; they are not rolled back.
+
+===== Refreshing shards
+
+Specifying the `refresh` parameter refreshes all shards once the request completes.
+This is different than the update API's `refresh` parameter, which causes just the shard
+that received the request to be refreshed. Unlike the update API, it does not support
+`wait_for`.
+
+[[docs-update-by-query-task-api]]
+===== Running update by query asynchronously
+
+If the request contains `wait_for_completion=false`, {es}
+performs some preflight checks, launches the request, and returns a
+<> you can use to cancel or get the status of the task.
+{es} creates a record of this task as a document at `.tasks/task/${taskId}`.
+When you are done with a task, you should delete the task document so
+{es} can reclaim the space.
+
+===== Waiting for active shards
+
+`wait_for_active_shards` controls how many copies of a shard must be active
+before proceeding with the request. See <>
+for details. `timeout` controls how long each write request waits for unavailable
+shards to become available. Both work exactly the way they work in the
+<>. Update by query uses scrolled searches, so you can also
+specify the `scroll` parameter to control how long it keeps the search context
+alive, for example `?scroll=10m`. The default is 5 minutes.
+
+===== Throttling update requests
+
+To control the rate at which update by query issues batches of update operations,
+you can set `requests_per_second` to any positive decimal number. This pads each
+batch with a wait time to throttle the rate. Set `requests_per_second` to `-1`
+to disable throttling.
+
+Throttling uses a wait time between batches so that the internal scroll requests
+can be given a timeout that takes the request padding into account.
The padding
+time is the difference between the batch size divided by the
+`requests_per_second` and the time spent writing. By default the batch size is
+`1000`, so if `requests_per_second` is set to `500`:
+
+[source,txt]
--------------------------------------------------
-POST twitter/_update_by_query?conflicts=proceed
+target_time = 1000 / 500 per second = 2 seconds
+wait_time = target_time - write_time = 2 seconds - .5 seconds = 1.5 seconds
--------------------------------------------------
-// TEST[setup:twitter]

-You can also limit `_update_by_query` using the
-<>. This will update all documents from the
-`twitter` index for the user `kimchy`:
+Since the batch is issued as a single `_bulk` request, large batch sizes
+cause {es} to create many requests and wait before starting the next set.
+This is "bursty" instead of "smooth".
+
+[[docs-update-by-query-slice]]
+===== Slicing
+
+Update by query supports <> to parallelize the
+update process. This can improve efficiency and provide a
+convenient way to break the request down into smaller parts.
+
+Setting `slices` to `auto` chooses a reasonable number for most indices.
+If you're slicing manually or otherwise tuning automatic slicing, keep in mind
+that:
+
+* Query performance is most efficient when the number of `slices` is equal to
+the number of shards in the index. If that number is large (for example,
+500), choose a lower number as too many `slices` hurt performance. Setting
+`slices` higher than the number of shards generally does not improve efficiency
+and adds overhead.
+
+* Update performance scales linearly across available resources with the
+number of slices.
+
+Whether query or update performance dominates the runtime depends on the
+documents being reindexed and cluster resources.
+
+[[docs-update-by-query-api-path-params]]
+==== {api-path-parms-title}
+
+``::
+(Optional, string) A comma-separated list of index names to search. Use `_all`
+or omit to search all indices.
+
+[[docs-update-by-query-api-query-params]]
+==== {api-query-parms-title}
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=analyzer]
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=analyze_wildcard]
+
+`conflicts`::
+  (Optional, string) What to do if update by query hits version conflicts:
+  `abort` or `proceed`. Defaults to `abort`.
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=default_operator]
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=df]
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
++
+Defaults to `open`.
+ +include::{docdir}/rest-api/common-parms.asciidoc[tag=from] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=lenient] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=max_docs] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=pipeline] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=preference] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=search-q] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=request_cache] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=refresh] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=requests_per_second] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=routing] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=scroll] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=scroll_size] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=search_type] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=search_timeout] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=slices] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=sort] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=source] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=source_excludes] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=source_includes] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=stats] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=terminate_after] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=timeout] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=version] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] + +[[docs-update-by-query-api-request-body]] +==== {api-request-body-title} + +`query`:: + (Optional, <>) Specifies the documents to update + using the <>. + + +[[docs-update-by-query-api-response-body]] +==== Response body + +`took`:: +The number of milliseconds from start to end of the whole operation. + +`timed_out`:: +This flag is set to `true` if any of the requests executed during the +update by query execution has timed out. + +`total`:: +The number of documents that were successfully processed. + +`updated`:: +The number of documents that were successfully updated. + +`deleted`:: +The number of documents that were successfully deleted. + +`batches`:: +The number of scroll responses pulled back by the update by query. + +`version_conflicts`:: +The number of version conflicts that the update by query hit. + +`noops`:: +The number of documents that were ignored because the script used for +the update by query returned a `noop` value for `ctx.op`. + +`retries`:: +The number of retries attempted by update by query. `bulk` is the number of bulk +actions retried, and `search` is the number of search actions retried. + +`throttled_millis`:: +Number of milliseconds the request slept to conform to `requests_per_second`. + +`requests_per_second`:: +The number of requests per second effectively executed during the update by query. + +`throttled_until_millis`:: +This field should always be equal to zero in an `_update_by_query` response. It only +has meaning when using the <>, where it +indicates the next time (in milliseconds since epoch) a throttled request will be +executed again in order to conform to `requests_per_second`. + +`failures`:: +Array of failures if there were any unrecoverable errors during the process. If +this is non-empty then the request aborted because of those failures. 
+Update by query is implemented using batches. Any failure causes the entire
+process to abort, but all failures in the current batch are collected into the
+array. You can use the `conflicts` option to prevent update by query from aborting on
+version conflicts.
+
+[[docs-update-by-query-api-example]]
+==== {api-examples-title}
+
+The simplest usage of `_update_by_query` just performs an update on every
+document in the index without changing the source. This is useful to
+<> or some other online
+mapping change.
+
+To update selected documents, specify a query in the request body:

[source,console]
--------------------------------------------------
@@ -85,17 +305,46 @@ POST twitter/_update_by_query?conflicts=proceed
  }
}
--------------------------------------------------
+// CONSOLE
// TEST[setup:twitter]

<1> The query must be passed as a value to the `query` key, in the same way as
the <>. You can also use the `q` parameter in the same way as the
search API.

-So far we've only been updating documents without changing their source. That
-is genuinely useful for things like
-<> but it's only half the
-fun. `_update_by_query` <> to update
-the document. This will increment the `likes` field on all of kimchy's tweets:
+Update documents in multiple indices:
+
+[source,console]
+--------------------------------------------------
+POST twitter,blog/_update_by_query
+--------------------------------------------------
+// CONSOLE
+// TEST[s/^/PUT twitter\nPUT blog\n/]
+
+Limit the update by query operation to shards that match a particular routing value:
+
+[source,console]
+--------------------------------------------------
+POST twitter/_update_by_query?routing=1
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
+
+By default update by query uses scroll batches of 1000.
+You can change the batch size with the `scroll_size` parameter:
+
+[source,console]
+--------------------------------------------------
+POST twitter/_update_by_query?scroll_size=100
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]
+
+[[docs-update-by-query-api-source]]
+===== Update the document source
+
+Update by query supports scripts to update the document source.
+For example, the following request increments the `likes` field for all of kimchy's tweets:

[source,console]
--------------------------------------------------
@@ -112,64 +361,32 @@ POST twitter/_update_by_query
  }
}
--------------------------------------------------
+// CONSOLE
// TEST[setup:twitter]

-Just as in <> you can set `ctx.op` to change the
-operation that is executed:
+Note that `conflicts=proceed` is not specified in this example. In this case, a
+version conflict should halt the process so you can handle the failure.
+
+As with the <>, you can set `ctx.op` to change the
+operation that is performed:

[horizontal]
`noop`::
-
-Set `ctx.op = "noop"` if your script decides that it doesn't have to make any
-changes. That will cause `_update_by_query` to omit that document from its updates.
- This no operation will be reported in the `noop` counter in the
-<>.
+Set `ctx.op = "noop"` if your script decides that it doesn't have to make any changes.
+The update by query operation skips updating the document and increments the `noop` counter.

`delete`::
+Set `ctx.op = "delete"` if your script decides that the document should be deleted.
+The update by query operation deletes the document and increments the `deleted` counter
+(see the sketch below).
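As a minimal sketch of the `delete` case (the `likes > 100` threshold is purely
illustrative, reusing the `likes` field from the example above), a script might
look like:

[source,console]
--------------------------------------------------
POST twitter/_update_by_query
{
  "script": {
    "source": "if (ctx._source.likes > 100) { ctx.op = 'delete' }", <1>
    "lang": "painless"
  }
}
--------------------------------------------------
// TEST[setup:twitter]
<1> The threshold is hypothetical; any Painless condition that sets `ctx.op` works.

Documents removed this way are counted under `deleted` in the response body
rather than `updated`.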
-Set `ctx.op = "delete"` if your script decides that the document must be - deleted. The deletion will be reported in the `deleted` counter in the -<>. +Update by query only supports `update`, `noop`, and `delete`. +Setting `ctx.op` to anything else is an error. Setting any other field in `ctx` is an error. +This API only enables you to modify the source of matching documents, you cannot move them. -Setting `ctx.op` to anything else is an error. Setting any -other field in `ctx` is an error. +[[docs-update-by-query-api-ingest-pipeline]] +===== Update documents using an ingest pipeline -Note that we stopped specifying `conflicts=proceed`. In this case we want a -version conflict to abort the process so we can handle the failure. - -This API doesn't allow you to move the documents it touches, just modify their -source. This is intentional! We've made no provisions for removing the document -from its original location. - -It's also possible to do this whole thing on multiple indexes at once, just -like the search API: - -[source,console] --------------------------------------------------- -POST twitter,blog/_update_by_query --------------------------------------------------- -// TEST[s/^/PUT twitter\nPUT blog\n/] - -If you provide `routing` then the routing is copied to the scroll query, -limiting the process to the shards that match that routing value: - -[source,console] --------------------------------------------------- -POST twitter/_update_by_query?routing=1 --------------------------------------------------- -// TEST[setup:twitter] - -By default `_update_by_query` uses scroll batches of 1000. You can change the -batch size with the `scroll_size` URL parameter: - -[source,console] --------------------------------------------------- -POST twitter/_update_by_query?scroll_size=100 --------------------------------------------------- -// TEST[setup:twitter] - -`_update_by_query` can also use the <> feature by -specifying a `pipeline` like this: +Update by query can use the <> feature by specifying a `pipeline`: [source,console] -------------------------------------------------- @@ -185,164 +402,13 @@ PUT _ingest/pipeline/set-foo } POST twitter/_update_by_query?pipeline=set-foo -------------------------------------------------- +// CONSOLE // TEST[setup:twitter] -[float] -==== URL Parameters - -In addition to the standard parameters like `pretty`, the Update By Query API -also supports `refresh`, `wait_for_completion`, `wait_for_active_shards`, `timeout`, -and `scroll`. - -Sending the `refresh` will update all shards in the index being updated when -the request completes. This is different than the Update API's `refresh` -parameter, which causes just the shard that received the new data to be indexed. -Also unlike the Update API it does not support `wait_for`. - -If the request contains `wait_for_completion=false` then Elasticsearch will -perform some preflight checks, launch the request, and then return a `task` -which can be used with <> -to cancel or get the status of the task. Elasticsearch will also create a -record of this task as a document at `.tasks/task/${taskId}`. This is yours -to keep or remove as you see fit. When you are done with it, delete it so -Elasticsearch can reclaim the space it uses. - -`wait_for_active_shards` controls how many copies of a shard must be active -before proceeding with the request. See <> -for details. `timeout` controls how long each write request waits for unavailable -shards to become available. Both work exactly how they work in the -<>. 
Because `_update_by_query` uses scroll search, you can also specify -the `scroll` parameter to control how long it keeps the "search context" alive, -e.g. `?scroll=10m`. By default it's 5 minutes. - -`requests_per_second` can be set to any positive decimal number (`1.4`, `6`, -`1000`, etc.) and throttles the rate at which `_update_by_query` issues batches of -index operations by padding each batch with a wait time. The throttling can be -disabled by setting `requests_per_second` to `-1`. - -The throttling is done by waiting between batches so that scroll that -`_update_by_query` uses internally can be given a timeout that takes into -account the padding. The padding time is the difference between the batch size -divided by the `requests_per_second` and the time spent writing. By default the -batch size is `1000`, so if the `requests_per_second` is set to `500`: - -[source,txt] --------------------------------------------------- -target_time = 1000 / 500 per second = 2 seconds -wait_time = target_time - write_time = 2 seconds - .5 seconds = 1.5 seconds --------------------------------------------------- - -Since the batch is issued as a single `_bulk` request, large batch sizes will -cause Elasticsearch to create many requests and then wait for a while before -starting the next set. This is "bursty" instead of "smooth". The default is `-1`. [float] -[[docs-update-by-query-response-body]] -==== Response body - -////////////////////////// -[source,console] --------------------------------------------------- -POST /twitter/_update_by_query?conflicts=proceed --------------------------------------------------- -// TEST[setup:twitter] - -////////////////////////// - -The JSON response looks like this: - -[source,console-result] --------------------------------------------------- -{ - "took" : 147, - "timed_out": false, - "total": 5, - "updated": 5, - "deleted": 0, - "batches": 1, - "version_conflicts": 0, - "noops": 0, - "retries": { - "bulk": 0, - "search": 0 - }, - "throttled_millis": 0, - "requests_per_second": -1.0, - "throttled_until_millis": 0, - "failures" : [ ] -} --------------------------------------------------- -// TESTRESPONSE[s/"took" : 147/"took" : "$body.took"/] - -[horizontal] -`took`:: - -The number of milliseconds from start to end of the whole operation. - -`timed_out`:: - -This flag is set to `true` if any of the requests executed during the -update by query execution has timed out. - -`total`:: - -The number of documents that were successfully processed. - -`updated`:: - -The number of documents that were successfully updated. - -`deleted`:: - -The number of documents that were successfully deleted. - -`batches`:: - -The number of scroll responses pulled back by the update by query. - -`version_conflicts`:: - -The number of version conflicts that the update by query hit. - -`noops`:: - -The number of documents that were ignored because the script used for -the update by query returned a `noop` value for `ctx.op`. - -`retries`:: - -The number of retries attempted by update by query. `bulk` is the number of bulk -actions retried, and `search` is the number of search actions retried. - -`throttled_millis`:: - -Number of milliseconds the request slept to conform to `requests_per_second`. - -`requests_per_second`:: - -The number of requests per second effectively executed during the update by query. - -`throttled_until_millis`:: - -This field should always be equal to zero in an `_update_by_query` response. 
It only -has meaning when using the <>, where it -indicates the next time (in milliseconds since epoch) a throttled request will be -executed again in order to conform to `requests_per_second`. - -`failures`:: - -Array of failures if there were any unrecoverable errors during the process. If -this is non-empty then the request aborted because of those failures. -Update by query is implemented using batches. Any failure causes the entire -process to abort, but all failures in the current batch are collected into the -array. You can use the `conflicts` option to prevent reindex from aborting on -version conflicts. - - - -[float] -[[docs-update-by-query-task-api]] -==== Works with the Task API +[[docs-update-by-query-fetch-tasks]] +===== Get the status of update by query operations You can fetch the status of all running update by query requests with the <>: @@ -421,7 +487,7 @@ you to delete that document. [float] [[docs-update-by-query-cancel-task-api]] -==== Works with the Cancel Task API +===== Cancel an update by query operation Any update by query can be cancelled using the <>: @@ -439,7 +505,7 @@ that it has been cancelled and terminates itself. [float] [[docs-update-by-query-rethrottle]] -==== Rethrottling +===== Change throttling for a request The value of `requests_per_second` can be changed on a running update by query using the `_rethrottle` API: @@ -458,17 +524,9 @@ query takes effect immediately, but rethrotting that slows down the query will take effect after completing the current batch. This prevents scroll timeouts. -[float] -[[docs-update-by-query-slice]] -==== Slicing - -Update by query supports <> to parallelize the updating process. -This parallelization can improve efficiency and provide a convenient way to -break the request down into smaller parts. - [float] [[docs-update-by-query-manual-slice]] -===== Manual slicing +===== Slice manually Slice an update by query manually by providing a slice id and total number of slices to each request: @@ -522,7 +580,7 @@ Which results in a sensible `total` like this one: [float] [[docs-update-by-query-automatic-slice]] -===== Automatic slicing +===== Use automatic slicing You can also let update by query automatically parallelize using <> to slice on `_id`. Use `slices` to specify the number of @@ -590,29 +648,9 @@ being updated. * Each sub-request gets a slightly different snapshot of the source index though these are all taken at approximately the same time. -[float] -[[docs-update-by-query-picking-slices]] -====== Picking the number of slices - -If slicing automatically, setting `slices` to `auto` will choose a reasonable -number for most indices. If you're slicing manually or otherwise tuning -automatic slicing, use these guidelines. - -Query performance is most efficient when the number of `slices` is equal to the -number of shards in the index. If that number is large, (for example, -500) choose a lower number as too many `slices` will hurt performance. Setting -`slices` higher than the number of shards generally does not improve efficiency -and adds overhead. - -Update performance scales linearly across available resources with the -number of slices. - -Whether query or update performance dominates the runtime depends on the -documents being reindexed and cluster resources. 
- [float] [[picking-up-a-new-property]] -==== Pick up a new property +===== Pick up a new property Say you created an index without dynamic mapping, filled it with data, and then added a mapping value to pick up more fields from the data: diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 0723e91b6b5f..f70f2c938ddc 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -386,14 +386,12 @@ that match the search criteria: "max_score" : null, "hits" : [ { "_index" : "bank", - "_type" : "_doc", "_id" : "0", "sort": [0], "_score" : null, "_source" : {"account_number":0,"balance":16623,"firstname":"Bradshaw","lastname":"Mckenzie","age":29,"gender":"F","address":"244 Columbus Place","employer":"Euron","email":"bradshawmckenzie@euron.com","city":"Hobucken","state":"CO"} }, { "_index" : "bank", - "_type" : "_doc", "_id" : "1", "sort": [1], "_score" : null, diff --git a/docs/reference/how-to/recipes/stemming.asciidoc b/docs/reference/how-to/recipes/stemming.asciidoc index 462998c82b3b..fcb6c61a6551 100644 --- a/docs/reference/how-to/recipes/stemming.asciidoc +++ b/docs/reference/how-to/recipes/stemming.asciidoc @@ -88,7 +88,6 @@ GET index/_search "hits": [ { "_index": "index", - "_type": "_doc", "_id": "1", "_score": 0.18232156, "_source": { @@ -97,7 +96,6 @@ GET index/_search }, { "_index": "index", - "_type": "_doc", "_id": "2", "_score": 0.18232156, "_source": { @@ -148,7 +146,6 @@ GET index/_search "hits": [ { "_index": "index", - "_type": "_doc", "_id": "1", "_score": 0.8025915, "_source": { @@ -207,7 +204,6 @@ GET index/_search "hits": [ { "_index": "index", - "_type": "_doc", "_id": "1", "_score": 0.8025915, "_source": { diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc index 0f3e112c1dc8..91337b0b0a1c 100644 --- a/docs/reference/how-to/search-speed.asciidoc +++ b/docs/reference/how-to/search-speed.asciidoc @@ -336,8 +336,8 @@ If the machine running Elasticsearch is restarted, the filesystem cache will be empty, so it will take some time before the operating system loads hot regions of the index into memory so that search operations are fast. You can explicitly tell the operating system which files should be loaded into memory eagerly -depending on the file extension using the <> -setting. +depending on the file extension using the +<> setting. 
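For instance, a minimal sketch, assuming a new index named `my_index`; the
`nvd` (norms) and `dvd` (doc values) extensions are illustrative only:

[source,console]
--------------------------------------------------
PUT /my_index
{
  "settings": {
    "index.store.preload": ["nvd", "dvd"] <1>
  }
}
--------------------------------------------------
<1> Illustrative file extensions; see the preloading documentation for the full list.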
WARNING: Loading data into the filesystem cache eagerly on too many indices or too many files will make search _slower_ if the filesystem cache is not large diff --git a/docs/reference/ilm/apis/slm-api.asciidoc b/docs/reference/ilm/apis/slm-api.asciidoc index 59c1601ab9b3..013225f9696d 100644 --- a/docs/reference/ilm/apis/slm-api.asciidoc +++ b/docs/reference/ilm/apis/slm-api.asciidoc @@ -142,6 +142,7 @@ The output looks similar to the following: "retention": {} }, "stats": { + "policy": "daily-snapshots", "snapshots_taken": 0, "snapshots_failed": 0, "snapshots_deleted": 0, @@ -184,7 +185,7 @@ To take an immediate snapshot using a policy, use the following [source,console] -------------------------------------------------- -PUT /_slm/policy/daily-snapshots/_execute +POST /_slm/policy/daily-snapshots/_execute -------------------------------------------------- // TEST[skip:we can't easily handle snapshots from docs tests] @@ -231,6 +232,7 @@ Which, in this case shows an error because the index did not exist: "retention": {} }, "stats": { + "policy": "daily-snapshots", "snapshots_taken": 0, "snapshots_failed": 1, "snapshots_deleted": 0, @@ -277,7 +279,7 @@ Another snapshot can immediately be executed to ensure the new policy works: [source,console] -------------------------------------------------- -PUT /_slm/policy/daily-snapshots/_execute +POST /_slm/policy/daily-snapshots/_execute -------------------------------------------------- // TEST[skip:we can't handle snapshots in docs tests] @@ -319,6 +321,7 @@ Which now includes the successful snapshot information: "retention": {} }, "stats": { + "policy": "daily-snapshots", "snapshots_taken": 1, "snapshots_failed": 1, "snapshots_deleted": 0, @@ -371,14 +374,15 @@ Which returns a response similar to: "retention_timed_out": 0, "retention_deletion_time": "1.4s", "retention_deletion_time_millis": 1404, - "policy_metrics": { - "daily-snapshots": { + "policy_metrics": [ + { + "policy": "daily-snapshots", "snapshots_taken": 1, "snapshots_failed": 1, "snapshots_deleted": 0, "snapshot_deletion_failures": 0 } - }, + ], "total_snapshots_taken": 1, "total_snapshots_failed": 1, "total_snapshots_deleted": 0, diff --git a/docs/reference/ilm/getting-started-slm.asciidoc b/docs/reference/ilm/getting-started-slm.asciidoc index 32a5c5ef4d89..e6f7dff2749c 100644 --- a/docs/reference/ilm/getting-started-slm.asciidoc +++ b/docs/reference/ilm/getting-started-slm.asciidoc @@ -132,7 +132,7 @@ as using the configuration from our policy right now instead of waiting for [source,console] -------------------------------------------------- -PUT /_slm/policy/nightly-snapshots/_execute +POST /_slm/policy/nightly-snapshots/_execute -------------------------------------------------- // TEST[skip:we can't easily handle snapshots from docs tests] diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc index 3b03bcc94f13..f5a1d41f6d67 100644 --- a/docs/reference/index-modules/similarity.asciidoc +++ b/docs/reference/index-modules/similarity.asciidoc @@ -260,7 +260,6 @@ Which yields: "_shard": "[index][0]", "_node": "OzrdjxNtQGaqs4DmioFw9A", "_index": "index", - "_type": "_doc", "_id": "1", "_score": 1.9508477, "_source": { @@ -433,7 +432,6 @@ GET /index/_search?explain=true "_shard": "[index][0]", "_node": "OzrdjxNtQGaqs4DmioFw9A", "_index": "index", - "_type": "_doc", "_id": "1", "_score": 1.9508477, "_source": { diff --git a/docs/reference/index-modules/store.asciidoc b/docs/reference/index-modules/store.asciidoc index 
9bd25968e22f..fd86b29df679 100644 --- a/docs/reference/index-modules/store.asciidoc +++ b/docs/reference/index-modules/store.asciidoc @@ -83,7 +83,8 @@ setting is useful, for example, if you are in an environment where you can not control the ability to create a lot of memory maps so you need disable the ability to use memory-mapping. -=== Pre-loading data into the file system cache +[[preload-data-to-file-system-cache]] +=== Preloading data into the file system cache NOTE: This is an expert setting, the details of which may change in the future. diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index e77886767ba7..6c3fe836b952 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -16,8 +16,6 @@ include::getting-started.asciidoc[] include::setup.asciidoc[] -include::monitoring/configuring-monitoring.asciidoc[] - include::setup/bootstrap-checks-xes.asciidoc[] include::upgrade.asciidoc[] @@ -46,10 +44,10 @@ include::sql/index.asciidoc[] include::monitoring/index.asciidoc[] -include::rollup/index.asciidoc[] - include::frozen-indices.asciidoc[] +include::data-rollup-transform.asciidoc[] + include::high-availability.asciidoc[] include::security/index.asciidoc[] diff --git a/docs/reference/indices.asciidoc b/docs/reference/indices.asciidoc index 3fd464027a40..0fad5afc514e 100644 --- a/docs/reference/indices.asciidoc +++ b/docs/reference/indices.asciidoc @@ -29,7 +29,6 @@ index settings, aliases, mappings, and index templates. * <> * <> * <> -* <> [float] [[alias-management]] @@ -102,8 +101,6 @@ include::indices/get-mapping.asciidoc[] include::indices/get-field-mapping.asciidoc[] -include::indices/types-exists.asciidoc[] - include::indices/add-alias.asciidoc[] include::indices/delete-alias.asciidoc[] diff --git a/docs/reference/indices/clone-index.asciidoc b/docs/reference/indices/clone-index.asciidoc index d7eb2d209134..e9c9dd2796a9 100644 --- a/docs/reference/indices/clone-index.asciidoc +++ b/docs/reference/indices/clone-index.asciidoc @@ -1,12 +1,60 @@ [[indices-clone-index]] -=== Clone Index +=== Clone index API +++++ +Clone index +++++ -The clone index API allows you to clone an existing index into a new index, -where each original primary shard is cloned into a new primary shard in -the new index. +Clones an existing index. -[float] -==== How does cloning work? +[source,console] +-------------------------------------------------- +POST /twitter/_clone/cloned-twitter-index +-------------------------------------------------- +// TEST[s/^/PUT twitter\n{"settings":{"index.number_of_shards" : 5,"blocks.write":true}}\n/] + + +[[clone-index-api-request]] +==== {api-request-title} + +`POST //_clone/` + +`PUT //_clone/` + + +[[clone-index-api-prereqs]] +==== {api-prereq-title} + +To clone an index, +the index must be marked as read-only +and have a <> status of `green`. + +For example, +the following request prevents write operations on `my_source_index` +so it can be cloned. +Metadata changes like deleting the index are still allowed. + +[source,console] +-------------------------------------------------- +PUT /my_source_index/_settings +{ + "settings": { + "index.blocks.write": true + } +} +-------------------------------------------------- +// TEST[s/^/PUT my_source_index\n/] + + +[[clone-index-api-desc]] +==== {api-description-title} + +Use the clone index API +to clone an existing index into a new index, +where each original primary shard is cloned +into a new primary shard in the new index. 
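Since the request line above lists both verbs, the `PUT` form behaves the same
as `POST`; a minimal sketch, assuming `my_source_index` has already been marked
read-only as shown:

[source,console]
--------------------------------------------------
PUT /my_source_index/_clone/my_target_index
--------------------------------------------------
// TEST[continued]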
+ +[[cloning-works]] +===== How cloning works Cloning works as follows: @@ -20,49 +68,15 @@ Cloning works as follows: * Finally, it recovers the target index as though it were a closed index which had just been re-opened. -[float] -==== Preparing an index for cloning - -Create a new index: - -[source,console] --------------------------------------------------- -PUT my_source_index -{ - "settings": { - "index.number_of_shards" : 5 - } -} --------------------------------------------------- - -In order to clone an index, the index must be marked as read-only, -and have <> `green`. - -This can be achieved with the following request: - -[source,console] --------------------------------------------------- -PUT /my_source_index/_settings -{ - "settings": { - "index.blocks.write": true <1> - } -} --------------------------------------------------- -// TEST[continued] - -<1> Prevents write operations to this index while still allowing metadata - changes like deleting the index. - -[float] -==== Cloning an index +[[clone-index]] +===== Clone an index To clone `my_source_index` into a new index called `my_target_index`, issue the following request: [source,console] -------------------------------------------------- -POST my_source_index/_clone/my_target_index +POST /my_source_index/_clone/my_target_index -------------------------------------------------- // TEST[continued] @@ -72,9 +86,9 @@ the cluster state -- it doesn't wait for the clone operation to start. [IMPORTANT] ===================================== -Indices can only be cloned if they satisfy the following requirements: +Indices can only be cloned if they meet the following requirements: -* the target index must not exist +* The target index must not exist. * The source index must have the same number of primary shards as the target index. @@ -88,7 +102,7 @@ and accepts `settings` and `aliases` parameters for the target index: [source,console] -------------------------------------------------- -POST my_source_index/_clone/my_target_index +POST /my_source_index/_clone/my_target_index { "settings": { "index.number_of_shards": 5 <1> @@ -107,10 +121,10 @@ POST my_source_index/_clone/my_target_index NOTE: Mappings may not be specified in the `_clone` request. The mappings of the source index will be used for the target index. -[float] -==== Monitoring the clone process +[[monitor-cloning]] +===== Monitor the cloning process -The clone process can be monitored with the <>, or the <> can be used to wait until all primary shards have been allocated by setting the `wait_for_status` parameter to `yellow`. @@ -123,12 +137,38 @@ can be allocated on that node. Once the primary shard is allocated, it moves to state `initializing`, and the clone process begins. When the clone operation completes, the shard will -become `active`. At that point, Elasticsearch will try to allocate any +become `active`. At that point, {es} will try to allocate any replicas and may decide to relocate the primary shard to another node. -[float] -==== Wait For Active Shards +[[clone-wait-active-shards]] +===== Wait for active shards Because the clone operation creates a new index to clone the shards to, the <> setting on index creation applies to the clone index action as well. + + +[[clone-index-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Name of the source index to clone. 
+ +include::{docdir}/rest-api/common-parms.asciidoc[tag=target-index] + + +[[clone-index-api-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=timeoutparms] + + +[[clone-index-api-request-body]] +==== {api-request-body-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=target-index-aliases] + +include::{docdir}/rest-api/common-parms.asciidoc[tag=target-index-settings] diff --git a/docs/reference/indices/create-index.asciidoc b/docs/reference/indices/create-index.asciidoc index fef6ff96a523..afb7ab91232b 100644 --- a/docs/reference/indices/create-index.asciidoc +++ b/docs/reference/indices/create-index.asciidoc @@ -35,6 +35,7 @@ creating an index, you can specify the following: -- (Optional, string) Name of the index you wish to create. +// tag::index-name-reqs[] Index names must meet the following criteria: - Lowercase only @@ -43,6 +44,7 @@ Index names must meet the following criteria: - Cannot start with `-`, `_`, `+` - Cannot be `.` or `..` - Cannot be longer than 255 bytes (note it is bytes, so multi-byte characters will count towards the 255 limit faster) +// end::index-name-reqs[] -- diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 8372de550242..294c38790e1b 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -1,5 +1,37 @@ [[indices-rollover-index]] -=== Rollover Index +=== Rollover index API +++++ +Rollover index +++++ + +Assigns an <> to a new index +when the alias's existing index meets a condition you provide. + +[source,console] +---- +POST /alias1/_rollover/twitter +{ + "conditions": { + "max_age": "7d", + "max_docs": 1000, + "max_size": "5gb" + } +} +---- +// TEST[s/^/PUT my_old_index_name\nPUT my_old_index_name\/_alias\/alias1\n/] + + +[[rollover-index-api-request]] +==== {api-request-title} + + +`POST //_rollover/` + +`POST //_rollover/` + + +[[rollover-index-api-desc]] +==== {api-description-title} The rollover index API rolls an <> to a new index when the existing index meets a condition you provide. You can use this API to retire @@ -24,17 +56,102 @@ from the original (rolled-over) index. In this scenario, the write index will have its rollover alias' `is_write_index` set to `false`, while the newly created index will now have the rollover alias pointing to it as the write index with `is_write_index` as `true`. -The available conditions are: -[[index-rollover-conditions]] -.`conditions` parameters -[options="header"] -|=== -| Name | Description -| max_age | The maximum age of the index -| max_docs | The maximum number of documents the index should contain. This does not add documents multiple times for replicas -| max_size | The maximum estimated size of the primary shard of the index -|=== +[[rollover-wait-active-shards]] +===== Wait for active shards + +Because the rollover operation creates a new index to rollover to, the +<> setting on +index creation applies to the rollover action. + + +[[rollover-index-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +Name of the existing index alias +to assign to the target index. + + +``:: ++ +-- +(Optional*, string) +Name of the target index to create and assign the index alias. 
+
+include::{docdir}/indices/create-index.asciidoc[tag=index-name-reqs]
+
+*This parameter is not required
+if the alias is assigned to an index name that ends with `-` and a number,
+such as `logs-000001`.
+In this case,
+the name of the new index follows the same pattern,
+incrementing the number.
+For example,
+`logs-000001` increments to `logs-000002`.
+This number is zero-padded with a length of 6,
+regardless of the prior index name.
+
+If the existing index for the alias does not match this pattern,
+this parameter is required.
+--
+
+
+[[rollover-index-api-query-params]]
+==== {api-query-parms-title}
+
+`dry_run`::
+(Optional, boolean)
+If `true`,
+the request checks whether the index matches provided conditions
+but does not perform a rollover.
+Defaults to `false`.
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=include-type-name]
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=wait_for_active_shards]
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=timeoutparms]
+
+
+[[rollover-index-api-request-body]]
+==== {api-request-body-title}
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=aliases]
+
+`conditions`::
++
+--
+(Required, object)
+Set of conditions the index alias's existing index must meet to roll over.
+
+Parameters include:
+
+`max_age`::
+(Optional, <>)
+Maximum age of the index.
+
+`max_docs`::
+(Optional, integer)
+Maximum number of documents in the index.
+This number does *not* include documents in replica shards.
+
+`max_size`::
+(Optional, <>)
+Maximum estimated size of the primary shard of the index.
+--
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=mappings]
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=settings]
+
+
+[[rollover-index-api-example]]
+==== {api-examples-title}
+
+[[rollover-index-basic-ex]]
+===== Basic example

[source,console]
--------------------------------------------------
@@ -63,7 +180,7 @@ POST /logs_write/_rollover <2>
contains 1,000 or more documents, or has an index size at least around 5GB, then the
`logs-000002` index is created and the `logs_write` alias is updated to point to
`logs-000002`.

-The above request might return the following response:
+The API returns the following response:

[source,console-result]
--------------------------------------------------
@@ -86,8 +203,41 @@ The above request might return the following response:
<2> Whether the rollover was dry run.
<3> The result of each condition.

-[float]
-==== Naming the new index
+[[rollover-index-settings-ex]]
+===== Specify settings for the target index
+
+The settings, mappings, and aliases for the new index are taken from any
+matching <>. Additionally, you can specify
+`settings`, `mappings`, and `aliases` in the body of the request, just like the
+<> API. Values specified in the request
+override any values set in matching index templates. For example, the following
+`rollover` request overrides the `index.number_of_shards` setting:
+
+[source,console]
+--------------------------------------------------
+PUT /logs-000001
+{
+  "aliases": {
+    "logs_write": {}
+  }
+}
+
+POST /logs_write/_rollover
+{
+  "conditions" : {
+    "max_age": "7d",
+    "max_docs": 1000,
+    "max_size": "5gb"
+  },
+  "settings": {
+    "index.number_of_shards": 2
+  }
+}
+--------------------------------------------------
+
+
+[[rollover-index-specify-index-ex]]
+===== Specify a target index name

If the name of the existing index ends with `-` and a number -- e.g.
`logs-000001` -- then the name of the new index will follow the same pattern, @@ -110,8 +260,9 @@ POST /my_alias/_rollover/my_new_index_name -------------------------------------------------- // TEST[s/^/PUT my_old_index_name\nPUT my_old_index_name\/_alias\/my_alias\n/] -[float] -==== Using date math with the rollover API + +[[_using_date_math_with_the_rollover_api]] +===== Use date math with a rollover It can be useful to use <> to name the rollover index according to the date that the index rolled over, e.g. @@ -187,53 +338,15 @@ GET /%3Clogs-%7Bnow%2Fd%7D-*%3E%2C%3Clogs-%7Bnow%2Fd-1d%7D-*%3E%2C%3Clogs-%7Bnow // TEST[continued] // TEST[s/now/2016.10.31||/] -[float] -==== Defining the new index -The settings, mappings, and aliases for the new index are taken from any -matching <>. Additionally, you can specify -`settings`, `mappings`, and `aliases` in the body of the request, just like the -<> API. Values specified in the request -override any values set in matching index templates. For example, the following -`rollover` request overrides the `index.number_of_shards` setting: - -[source,console] --------------------------------------------------- -PUT /logs-000001 -{ - "aliases": { - "logs_write": {} - } -} - -POST /logs_write/_rollover -{ - "conditions" : { - "max_age": "7d", - "max_docs": 1000, - "max_size": "5gb" - }, - "settings": { - "index.number_of_shards": 2 - } -} --------------------------------------------------- - -[float] -==== Dry run +[[rollover-index-api-dry-run-ex]] +===== Dry run The rollover API supports `dry_run` mode, where request conditions can be -checked without performing the actual rollover: +checked without performing the actual rollover. [source,console] -------------------------------------------------- -PUT /logs-000001 -{ - "aliases": { - "logs_write": {} - } -} - POST /logs_write/_rollover?dry_run { "conditions" : { @@ -243,17 +356,11 @@ POST /logs_write/_rollover?dry_run } } -------------------------------------------------- +// TEST[s/^/PUT logs-000001\nPUT logs-000001\/_alias\/logs_write\n/] -[float] -==== Wait For Active Shards - -Because the rollover operation creates a new index to rollover to, the -<> setting on -index creation applies to the rollover action as well. [[indices-rollover-is-write-index]] -[float] -==== Write Index Alias Behavior +===== Roll over a write index The rollover alias when rolling over a write index that has `is_write_index` explicitly set to `true` is not swapped during rollover actions. Since having an alias point to multiple indices is ambiguous in distinguishing diff --git a/docs/reference/indices/types-exists.asciidoc b/docs/reference/indices/types-exists.asciidoc deleted file mode 100644 index 2a5fc3f4c1ef..000000000000 --- a/docs/reference/indices/types-exists.asciidoc +++ /dev/null @@ -1,16 +0,0 @@ -[[indices-types-exists]] -=== Types Exists - -deprecated[7.0.0, Types are deprecated and are in the process of being removed. See <>.] - -Used to check if a type/types exists in an index/indices. - -[source,console] --------------------------------------------------- -HEAD twitter/_mapping/tweet --------------------------------------------------- -// TEST[setup:twitter] -// TEST[warning:Type exists requests are deprecated, as types have been deprecated.] - -The HTTP status code indicates if the type exists or not. A `404` means -it does not exist, and `200` means it does. 
diff --git a/docs/reference/ingest/apis/get-pipeline.asciidoc b/docs/reference/ingest/apis/get-pipeline.asciidoc index 887ae870f525..b046b7886683 100644 --- a/docs/reference/ingest/apis/get-pipeline.asciidoc +++ b/docs/reference/ingest/apis/get-pipeline.asciidoc @@ -1,70 +1,16 @@ [[get-pipeline-api]] -=== Get Pipeline API +=== Get pipeline API +++++ +Get pipeline +++++ -The get pipeline API returns pipelines based on ID. This API always returns a local reference of the pipeline. - -////////////////////////// +Returns information about one or more ingest pipelines. +This API returns a local reference of the pipeline. +//// [source,console] --------------------------------------------------- -PUT _ingest/pipeline/my-pipeline-id -{ - "description" : "describe pipeline", - "processors" : [ - { - "set" : { - "field": "foo", - "value": "bar" - } - } - ] -} --------------------------------------------------- - -////////////////////////// - -[source,console] --------------------------------------------------- -GET _ingest/pipeline/my-pipeline-id --------------------------------------------------- -// TEST[continued] - -Example response: - -[source,console-result] --------------------------------------------------- -{ - "my-pipeline-id" : { - "description" : "describe pipeline", - "processors" : [ - { - "set" : { - "field" : "foo", - "value" : "bar" - } - } - ] - } -} --------------------------------------------------- - -For each returned pipeline, the source and the version are returned. -The version is useful for knowing which version of the pipeline the node has. -You can specify multiple IDs to return more than one pipeline. Wildcards are also supported. - -[float] -[[versioning-pipelines]] -==== Pipeline Versioning - -Pipelines can optionally add a `version` number, which can be any integer value, -in order to simplify pipeline management by external systems. The `version` -field is completely optional and it is meant solely for external management of -pipelines. To unset a `version`, simply replace the pipeline without specifying -one. 
- -[source,console] --------------------------------------------------- -PUT _ingest/pipeline/my-pipeline-id +---- +PUT /_ingest/pipeline/my-pipeline-id { "description" : "describe pipeline", "version" : 123, @@ -77,42 +23,114 @@ PUT _ingest/pipeline/my-pipeline-id } ] } --------------------------------------------------- - -To check for the `version`, you can -<> -using `filter_path` to limit the response to just the `version`: +---- +//// [source,console] --------------------------------------------------- -GET /_ingest/pipeline/my-pipeline-id?filter_path=*.version --------------------------------------------------- +---- +GET /_ingest/pipeline/my-pipeline-id +---- // TEST[continued] -This should give a small response that makes it both easy and inexpensive to parse: + + +[[get-pipeline-api-request]] +==== {api-request-title} + +`GET /_ingest/pipeline/` + +`GET /_ingest/pipeline` + + +[[get-pipeline-api-path-params]] +==== {api-path-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=path-pipeline] + + + +[[get-pipeline-api-query-params]] +==== {api-query-parms-title} + +include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout] + + +[[get-pipeline-api-api-example]] +==== {api-examples-title} + + +[[get-pipeline-api-specific-ex]] +===== Get information for a specific ingest pipeline + +[source,console] +---- +GET /_ingest/pipeline/my-pipeline-id +---- +// TEST[continued] + +The API returns the following response: [source,console-result] --------------------------------------------------- +---- +{ + "my-pipeline-id" : { + "description" : "describe pipeline", + "version" : 123, + "processors" : [ + { + "set" : { + "field" : "foo", + "value" : "bar" + } + } + ] + } +} +---- + + +[[get-pipeline-api-version-ex]] +===== Get the version of an ingest pipeline + +When you create or update an ingest pipeline, +you can specify an optional `version` parameter. +The version is useful for managing changes to pipeline +and viewing the current pipeline for an ingest node. + + +To check the pipeline version, +use the `filter_path` query parameter +to <> +to only the version. + +[source,console] +---- +GET /_ingest/pipeline/my-pipeline-id?filter_path=*.version +---- +// TEST[continued] + +The API returns the following response: + +[source,console-result] +---- { "my-pipeline-id" : { "version" : 123 } } --------------------------------------------------- - -////////////////////////// +---- +//// [source,console] --------------------------------------------------- +---- DELETE /_ingest/pipeline/my-pipeline-id --------------------------------------------------- +---- // TEST[continued] [source,console-result] --------------------------------------------------- +---- { "acknowledged": true } --------------------------------------------------- - -////////////////////////// +---- +//// diff --git a/docs/reference/ingest/apis/put-pipeline.asciidoc b/docs/reference/ingest/apis/put-pipeline.asciidoc index fe7b388b9ab6..fd42b3c97c8a 100644 --- a/docs/reference/ingest/apis/put-pipeline.asciidoc +++ b/docs/reference/ingest/apis/put-pipeline.asciidoc @@ -19,6 +19,51 @@ PUT _ingest/pipeline/my-pipeline-id } -------------------------------------------------- +[float] +[[versioning-pipelines]] +==== Pipeline versioning + +Pipelines can optionally add a `version` number, which can be any integer value, +in order to simplify pipeline management by external systems. The `version` +field is completely optional and it is meant solely for external management of +pipelines. 
+ +[source,console] +-------------------------------------------------- +PUT /_ingest/pipeline/my-pipeline-id +{ + "description" : "describe pipeline", + "version" : 123, + "processors" : [ + { + "set" : { + "field": "foo", + "value": "bar" + } + } + ] +} +-------------------------------------------------- + +To unset a `version`, simply replace the pipeline without specifying +one. + +[source,console] +-------------------------------------------------- +PUT /_ingest/pipeline/my-pipeline-id +{ + "description" : "describe pipeline", + "processors" : [ + { + "set" : { + "field": "foo", + "value": "bar" + } + } + ] +} +-------------------------------------------------- + ////////////////////////// [source,console] diff --git a/docs/reference/ingest/processors/geoip.asciidoc b/docs/reference/ingest/processors/geoip.asciidoc index 3ece7676e03e..58cc32d62976 100644 --- a/docs/reference/ingest/processors/geoip.asciidoc +++ b/docs/reference/ingest/processors/geoip.asciidoc @@ -266,7 +266,6 @@ GET /my_ip_locations/_search "hits" : [ { "_index" : "my_ip_locations", - "_type" : "_doc", "_id" : "1", "_score" : 1.0, "_source" : { diff --git a/docs/reference/mapping/fields/field-names-field.asciidoc b/docs/reference/mapping/fields/field-names-field.asciidoc index 5c03da1124f6..999d3f7049df 100644 --- a/docs/reference/mapping/fields/field-names-field.asciidoc +++ b/docs/reference/mapping/fields/field-names-field.asciidoc @@ -14,23 +14,9 @@ be available but will not use the `_field_names` field. [[disable-field-names]] ==== Disabling `_field_names` -NOTE: Disabling `_field_names` has been deprecated and will be removed in a future major version. +Disabling `_field_names` is no longer possible. It is now enabled by default +because it no longer carries the index overhead it once did. -Disabling `_field_names` is usually not necessary because it no longer -carries the index overhead it once did. If you have a lot of fields -which have `doc_values` and `norms` disabled and you do not need to -execute `exists` queries using those fields you might want to disable -`_field_names` by adding the following to the mappings: - -[source,console] --------------------------------------------------- -PUT tweets -{ - "mappings": { - "_field_names": { - "enabled": false - } - } -} --------------------------------------------------- -// TEST[warning:Index [tweets] uses the deprecated `enabled` setting for `_field_names`. Disabling _field_names is not necessary because it no longer carries a large index overhead. Support for this setting will be removed in a future major version. Please remove it from your mappings and templates.] +NOTE: Support for disabling `_field_names` has been removed. Using it on new +indices will throw an error. Using it in pre-8.0 indices is still allowed but +issues a deprecation warning. diff --git a/docs/reference/mapping/params/eager-global-ordinals.asciidoc b/docs/reference/mapping/params/eager-global-ordinals.asciidoc index 49c5e5fce28f..a7b6d748bd11 100644 --- a/docs/reference/mapping/params/eager-global-ordinals.asciidoc +++ b/docs/reference/mapping/params/eager-global-ordinals.asciidoc @@ -26,9 +26,9 @@ ordinal for each segment. Global ordinals are used if a search contains any of the following components: -* Bucket aggregations on `keyword` and `flattened` fields. This includes -`terms` aggregations as mentioned above, as well as `composite`, `sampler`, -and `significant_terms`. +* Certain bucket aggregations on `keyword`, `ip`, and `flattened` fields. 
This +includes `terms` aggregations as mentioned above, as well as `composite`, +`diversified_sampler`, and `significant_terms`. * Bucket aggregations on `text` fields that require <> to be enabled. * Operations on parent and child documents from a `join` field, including diff --git a/docs/reference/mapping/params/normalizer.asciidoc b/docs/reference/mapping/params/normalizer.asciidoc index b85c45fd807e..1e7e6870c302 100644 --- a/docs/reference/mapping/params/normalizer.asciidoc +++ b/docs/reference/mapping/params/normalizer.asciidoc @@ -94,7 +94,6 @@ both index and query time. "hits": [ { "_index": "index", - "_type": "_doc", "_id": "1", "_score": 0.47000363, "_source": { @@ -103,7 +102,6 @@ both index and query time. }, { "_index": "index", - "_type": "_doc", "_id": "2", "_score": 0.47000363, "_source": { diff --git a/docs/reference/mapping/types/parent-join.asciidoc b/docs/reference/mapping/types/parent-join.asciidoc index 292e5b7b468e..35b674bd1c61 100644 --- a/docs/reference/mapping/types/parent-join.asciidoc +++ b/docs/reference/mapping/types/parent-join.asciidoc @@ -179,7 +179,6 @@ Will return: "hits": [ { "_index": "my_index", - "_type": "_doc", "_id": "1", "_score": null, "_source": { @@ -192,7 +191,6 @@ Will return: }, { "_index": "my_index", - "_type": "_doc", "_id": "2", "_score": null, "_source": { @@ -205,7 +203,6 @@ Will return: }, { "_index": "my_index", - "_type": "_doc", "_id": "3", "_score": null, "_routing": "1", @@ -222,7 +219,6 @@ Will return: }, { "_index": "my_index", - "_type": "_doc", "_id": "4", "_score": null, "_routing": "1", diff --git a/docs/reference/mapping/types/percolator.asciidoc b/docs/reference/mapping/types/percolator.asciidoc index cd958c6fe234..981ba08c4fa4 100644 --- a/docs/reference/mapping/types/percolator.asciidoc +++ b/docs/reference/mapping/types/percolator.asciidoc @@ -199,7 +199,6 @@ now returns matches from the new index: "hits": [ { "_index": "new_index", <1> - "_type": "_doc", "_id": "1", "_score": 0.13076457, "_source": { @@ -390,7 +389,6 @@ This results in a response like this: "hits": [ { "_index": "test_index", - "_type": "_doc", "_id": "1", "_score": 0.13076457, "_source": { @@ -547,7 +545,6 @@ GET /my_queries1/_search "hits": [ { "_index": "my_queries1", - "_type": "_doc", "_id": "1", "_score": 0.18864399, "_source": { diff --git a/docs/reference/mapping/types/range.asciidoc b/docs/reference/mapping/types/range.asciidoc index 186096c0e9db..f0ee2879cfec 100644 --- a/docs/reference/mapping/types/range.asciidoc +++ b/docs/reference/mapping/types/range.asciidoc @@ -94,7 +94,6 @@ The result produced by the above query. 
"hits" : [ { "_index" : "range_index", - "_type" : "_doc", "_id" : "1", "_score" : 1.0, "_source" : { @@ -156,7 +155,6 @@ This query produces a similar result: "hits" : [ { "_index" : "range_index", - "_type" : "_doc", "_id" : "1", "_score" : 1.0, "_source" : { diff --git a/docs/reference/mapping/types/search-as-you-type.asciidoc b/docs/reference/mapping/types/search-as-you-type.asciidoc index 12e608fed878..1edf40894a9c 100644 --- a/docs/reference/mapping/types/search-as-you-type.asciidoc +++ b/docs/reference/mapping/types/search-as-you-type.asciidoc @@ -122,7 +122,6 @@ GET my_index/_search "hits" : [ { "_index" : "my_index", - "_type" : "_doc", "_id" : "1", "_score" : 0.8630463, "_source" : { diff --git a/docs/reference/migration/migrate_8_0/mappings.asciidoc b/docs/reference/migration/migrate_8_0/mappings.asciidoc index 16e75473885c..8c6fa75aad5e 100644 --- a/docs/reference/migration/migrate_8_0/mappings.asciidoc +++ b/docs/reference/migration/migrate_8_0/mappings.asciidoc @@ -15,6 +15,13 @@ The number of completion contexts within a single completion field has been limited to 10. +[float] +==== Removal of types + +The typed REST endpoints of the Put Mapping, Get Mapping and Get Field mapping +APIs have been removed in favour of their typeless REST endpoints, since indexes +no longer contain types, these typed endpoints are obsolete. + [float] ==== Defining multi-fields within multi-fields @@ -22,4 +29,13 @@ Previously, it was possible to define a multi-field within a multi-field. Defining chained multi-fields was deprecated in 7.3 and is now no longer supported. To migrate the mappings, all instances of `fields` that occur within a `fields` block should be removed, either by flattening the chained `fields` -blocks into a single level, or by switching to `copy_to` if appropriate. \ No newline at end of file +blocks into a single level, or by switching to `copy_to` if appropriate. + +[float] +[[fieldnames-enabling]] +==== Disallow use of the `enabled` setting on the `_field_names` field + +The setting has been deprecated with 7.5 and is no longer supported on new indices. +Mappings for older indices will continue to work but emit a deprecation warning. +The `enabled` setting for `_field_names` should be removed from templates and mappings. +Disabling _field_names is not necessary because it no longer carries a large index overhead. 
diff --git a/docs/reference/modules/cross-cluster-search.asciidoc b/docs/reference/modules/cross-cluster-search.asciidoc index 0027ee1af938..e85fe9994468 100644 --- a/docs/reference/modules/cross-cluster-search.asciidoc +++ b/docs/reference/modules/cross-cluster-search.asciidoc @@ -99,7 +99,6 @@ The API returns the following response: "hits": [ { "_index": "cluster_one:twitter", <1> - "_type": "_doc", "_id": "0", "_score": 1, "_source": { @@ -171,7 +170,6 @@ The API returns the following response: "hits": [ { "_index": "twitter", <1> - "_type": "_doc", "_id": "0", "_score": 2, "_source": { @@ -183,7 +181,6 @@ The API returns the following response: }, { "_index": "cluster_one:twitter", <2> - "_type": "_doc", "_id": "0", "_score": 1, "_source": { @@ -195,7 +192,6 @@ The API returns the following response: }, { "_index": "cluster_two:twitter", <3> - "_type": "_doc", "_id": "0", "_score": 1, "_source": { diff --git a/docs/reference/monitoring/collecting-monitoring-data.asciidoc b/docs/reference/monitoring/collecting-monitoring-data.asciidoc index 3d5a85b3012a..a2c95014b33a 100644 --- a/docs/reference/monitoring/collecting-monitoring-data.asciidoc +++ b/docs/reference/monitoring/collecting-monitoring-data.asciidoc @@ -1,10 +1,7 @@ [role="xpack"] [testenv="gold"] [[collecting-monitoring-data]] -=== Collecting monitoring data -++++ -Collecting monitoring data -++++ +== Collecting monitoring data If you enable the Elastic {monitor-features} in your cluster, you can optionally collect metrics about {es}. By default, monitoring is enabled but diff --git a/docs/reference/monitoring/collectors.asciidoc b/docs/reference/monitoring/collectors.asciidoc index 64d56e81cdc8..568d21e83547 100644 --- a/docs/reference/monitoring/collectors.asciidoc +++ b/docs/reference/monitoring/collectors.asciidoc @@ -110,7 +110,7 @@ For more information about the configuration options for the collectors, see [float] [[es-monitoring-stack]] -=== Collecting data from across the Elastic Stack +==== Collecting data from across the Elastic Stack {monitoring} in {es} also receives monitoring data from other parts of the Elastic Stack. 
In this way, it serves as an unscheduled monitoring data diff --git a/docs/reference/monitoring/configuring-filebeat.asciidoc b/docs/reference/monitoring/configuring-filebeat.asciidoc index fd77dc860ce8..b1e22d38f54a 100644 --- a/docs/reference/monitoring/configuring-filebeat.asciidoc +++ b/docs/reference/monitoring/configuring-filebeat.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[configuring-filebeat]] -=== Collecting {es} log data with {filebeat} +== Collecting {es} log data with {filebeat} [subs="attributes"] ++++ diff --git a/docs/reference/monitoring/configuring-metricbeat.asciidoc b/docs/reference/monitoring/configuring-metricbeat.asciidoc index 34f027b15539..ea3aecfac2a0 100644 --- a/docs/reference/monitoring/configuring-metricbeat.asciidoc +++ b/docs/reference/monitoring/configuring-metricbeat.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="gold"] [[configuring-metricbeat]] -=== Collecting {es} monitoring data with {metricbeat} +== Collecting {es} monitoring data with {metricbeat} [subs="attributes"] ++++ diff --git a/docs/reference/monitoring/configuring-monitoring.asciidoc b/docs/reference/monitoring/configuring-monitoring.asciidoc deleted file mode 100644 index e129999e3a51..000000000000 --- a/docs/reference/monitoring/configuring-monitoring.asciidoc +++ /dev/null @@ -1,23 +0,0 @@ -[role="xpack"] -[testenv="gold"] -[[configuring-monitoring]] -== Configuring monitoring in {es} -++++ -Configuring monitoring -++++ - -If you enable the Elastic {monitor-features} in your cluster, there are two -methods to collect metrics about {es}: - -* <> -* <> - -You can also <>. - -To learn about monitoring in general, see -{stack-ov}/xpack-monitoring.html[Monitoring the {stack}]. - -include::collecting-monitoring-data.asciidoc[] -include::configuring-metricbeat.asciidoc[] -include::configuring-filebeat.asciidoc[] -include::indices.asciidoc[] \ No newline at end of file diff --git a/docs/reference/monitoring/exporters.asciidoc b/docs/reference/monitoring/exporters.asciidoc index fee09015dbbd..742b24608f25 100644 --- a/docs/reference/monitoring/exporters.asciidoc +++ b/docs/reference/monitoring/exporters.asciidoc @@ -158,6 +158,3 @@ which is used to determine whether the resource should be replaced. The `version field value represents the latest version of {monitoring} that changed the resource. If a resource is edited by someone or something external to {monitoring}, those changes are lost the next time an automatic update occurs. - -include::local-export.asciidoc[] -include::http-export.asciidoc[] diff --git a/docs/reference/monitoring/how-monitoring-works.asciidoc b/docs/reference/monitoring/how-monitoring-works.asciidoc new file mode 100644 index 000000000000..283ed0412be3 --- /dev/null +++ b/docs/reference/monitoring/how-monitoring-works.asciidoc @@ -0,0 +1,39 @@ +[role="xpack"] +[testenv="basic"] +[[how-monitoring-works]] +== How monitoring works +++++ +How it works +++++ + +Each {es} node, {ls} node, {kib} instance, and Beat is considered unique in the +cluster based on its persistent UUID, which is written to the +<> directory when the node or instance starts. + +Monitoring documents are just ordinary JSON documents built by monitoring each +{stack} component at a specified collection interval. If you want to alter the +templates for these indices, see <>. + +Each component in the {stack} is responsible for monitoring itself and then +forwarding those documents to the production cluster for both routing and +indexing (storage). 
The routing and indexing processes in {es} are handled by +what are called <> and +<>. + +Alternatively, you can use {metricbeat} to collect monitoring data and ship it +directly to the monitoring cluster. + +To learn how to collect monitoring data, see: + +* <> +* <> +* {kibana-ref}/xpack-monitoring.html[Monitoring {kib}] +* {logstash-ref}/monitoring-logstash.html[Monitoring {ls}] +* Monitoring Beats: +** {auditbeat-ref}/monitoring.html[{auditbeat}] +** {filebeat-ref}/monitoring.html[{filebeat}] +** {functionbeat-ref}/monitoring.html[{functionbeat}] +** {heartbeat-ref}/monitoring.html[{heartbeat}] +** {metricbeat-ref}/monitoring.html[{metricbeat}] +** {packetbeat-ref}/monitoring.html[{packetbeat}] +** {winlogbeat-ref}/monitoring.html[{winlogbeat}] diff --git a/docs/reference/monitoring/http-export.asciidoc b/docs/reference/monitoring/http-export.asciidoc index a875e5a0169e..eaca9904d04d 100644 --- a/docs/reference/monitoring/http-export.asciidoc +++ b/docs/reference/monitoring/http-export.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[http-exporter]] -=== HTTP Exporters +=== HTTP exporters The `http` exporter is the preferred exporter in {monitoring} because it enables the use of a separate monitoring cluster. As a secondary benefit, it avoids diff --git a/docs/reference/monitoring/images/architecture.png b/docs/reference/monitoring/images/architecture.png new file mode 100644 index 000000000000..769618c0ccc6 Binary files /dev/null and b/docs/reference/monitoring/images/architecture.png differ diff --git a/docs/reference/monitoring/index.asciidoc b/docs/reference/monitoring/index.asciidoc index fbda72e0f979..937a01340b38 100644 --- a/docs/reference/monitoring/index.asciidoc +++ b/docs/reference/monitoring/index.asciidoc @@ -1,54 +1,39 @@ [role="xpack"] [testenv="basic"] -[[es-monitoring]] -= Monitoring {es} +[[monitor-elasticsearch-cluster]] += Monitor a cluster [partintro] -- -The Elastic {monitor-features} enable you to easily monitor the health of -your {es} cluster. The monitoring metrics are collected from each node and -stored in {es} indices. +The {stack} {monitor-features} provide a way to keep a pulse on the health and +performance of your {es} cluster. -TIP: In production environments, it is recommended to store the monitoring data -in a separate _monitoring cluster_. See -{stack-ov}/monitoring-production.html[Monitoring in a production environment]. - -Each {es} node is considered unique based on its persistent UUID, which is -written on first start to its <> directory, which -defaults to `./data`. - -All settings associated with monitoring in {es} must be set in either the -`elasticsearch.yml` file for each node or, where possible, in the dynamic -cluster settings. For more information, see <>. - -[[es-monitoring-overview]] -{es} is also at the core of monitoring across the {stack}. In all cases, -monitoring documents are just ordinary JSON documents built by monitoring each -{stack} component at some collection interval, then indexing those -documents into the monitoring cluster. - -Each component in the stack is responsible for monitoring itself and then -forwarding those documents to the {es} production cluster for both routing and -indexing (storage). The routing and indexing processes in {es} are handled by -what are called <> and -<>. - -Alternatively, in 6.4 and later, you can use {metricbeat} to collect -monitoring data about {kib} and ship it directly to the monitoring cluster, -rather than routing it through the production cluster. 
In 6.5 and later, you -can also use {metricbeat} to collect and ship data about {es}. - -You can view monitoring data from {kib} where it’s easy to spot issues at a -glance or delve into the system behavior over time to diagnose operational -issues. In addition to the built-in status warnings, you can also set up custom -alerts based on the data in the monitoring indices. - -For an introduction to monitoring your {stack}, including Beats, {ls}, and {kib}, -see {stack-ov}/xpack-monitoring.html[Monitoring the {stack}]. +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> -- -include::collectors.asciidoc[] -include::exporters.asciidoc[] +include::overview.asciidoc[] + +include::how-monitoring-works.asciidoc[] + +include::collecting-monitoring-data.asciidoc[] include::pause-export.asciidoc[] +include::configuring-metricbeat.asciidoc[] + +include::configuring-filebeat.asciidoc[] + +include::indices.asciidoc[] + +include::collectors.asciidoc[] + +include::exporters.asciidoc[] +include::local-export.asciidoc[] +include::http-export.asciidoc[] diff --git a/docs/reference/monitoring/indices.asciidoc b/docs/reference/monitoring/indices.asciidoc index 6586a945b5dd..c6432ea2e7f9 100644 --- a/docs/reference/monitoring/indices.asciidoc +++ b/docs/reference/monitoring/indices.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[config-monitoring-indices]] -=== Configuring indices for monitoring +== Configuring indices for monitoring <> are used to configure the indices that store the monitoring data collected from a cluster. diff --git a/docs/reference/monitoring/local-export.asciidoc b/docs/reference/monitoring/local-export.asciidoc index 821a6b1fc0e1..8723b226ca76 100644 --- a/docs/reference/monitoring/local-export.asciidoc +++ b/docs/reference/monitoring/local-export.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[local-exporter]] -=== Local Exporters +=== Local exporters The `local` exporter is the default exporter in {monitoring}. It routes data back into the same (local) cluster. In other words, it uses the production @@ -56,7 +56,7 @@ For more information about the configuration options for the `local` exporter, see <>. [[local-exporter-cleaner]] -==== Cleaner Service +==== Cleaner service One feature of the `local` exporter, which is not present in the `http` exporter, is a cleaner service. The cleaner service runs once per day at 01:00 AM UTC on diff --git a/docs/reference/monitoring/overview.asciidoc b/docs/reference/monitoring/overview.asciidoc new file mode 100644 index 000000000000..e4f58e4060c2 --- /dev/null +++ b/docs/reference/monitoring/overview.asciidoc @@ -0,0 +1,39 @@ +[role="xpack"] +[[monitoring-overview]] +== Monitoring overview +++++ +Overview +++++ + +When you monitor a cluster, you collect data from the {es} nodes, {ls} nodes, +{kib} instances, and Beats in your cluster. You can also +<>. + +All of the monitoring metrics are stored in {es}, which enables you to easily +visualize the data from {kib}. By default, the monitoring metrics are stored in +local indices. + +TIP: In production, we strongly recommend using a separate monitoring cluster. +Using a separate monitoring cluster prevents production cluster outages from +impacting your ability to access your monitoring data. It also prevents +monitoring activities from impacting the performance of your production cluster. +For the same reason, we also recommend using a separate {kib} instance for +viewing the monitoring data. 
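One practical detail behind the collection methods described in this part: the built-in collectors are switched with a dynamic cluster setting, so collection can be paused and resumed without a restart. A minimal sketch, assuming the `xpack.monitoring.collection.enabled` setting that the pause-collection page toggles:

[source,console]
----
PUT /_cluster/settings
{
  "persistent": {
    "xpack.monitoring.collection.enabled": true
  }
}
----

Setting it back to `false` pauses collection without discarding monitoring data that has already been indexed.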
+ +You can use {metricbeat} to collect and ship data about {es}, {kib}, {ls}, and +Beats directly to your monitoring cluster rather than routing it through your +production cluster. The following diagram illustrates a typical monitoring +architecture with separate production and monitoring clusters: + +image::images/architecture.png[A typical monitoring environment] + +If you have the appropriate license, you can route data from multiple production +clusters to a single monitoring cluster. For more information about the +differences between various subscription levels, see: +https://www.elastic.co/subscriptions + +IMPORTANT: In general, the monitoring cluster and the clusters being monitored +should be running the same version of the stack. A monitoring cluster cannot +monitor production clusters running newer versions of the stack. If necessary, +the monitoring cluster can monitor production clusters running the latest +release of the previous major version. diff --git a/docs/reference/monitoring/pause-export.asciidoc b/docs/reference/monitoring/pause-export.asciidoc index 7a8bc664ffc3..6cf02a1f2401 100644 --- a/docs/reference/monitoring/pause-export.asciidoc +++ b/docs/reference/monitoring/pause-export.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[pause-export]] -== Pausing Data Collection +=== Pausing data collection To stop generating {monitoring} data in {es}, disable data collection: diff --git a/docs/reference/query-dsl/nested-query.asciidoc b/docs/reference/query-dsl/nested-query.asciidoc index 619fe8c3dda2..24d3e619f597 100644 --- a/docs/reference/query-dsl/nested-query.asciidoc +++ b/docs/reference/query-dsl/nested-query.asciidoc @@ -23,7 +23,7 @@ mapping. For example: ---- PUT /my_index { - "mappings": { + "mappings" : { "properties" : { "obj1" : { "type" : "nested" @@ -33,7 +33,6 @@ PUT /my_index } ---- -// TESTSETUP [[nested-query-ex-query]] ===== Example query @@ -42,7 +41,7 @@ PUT /my_index ---- GET /my_index/_search { - "query": { + "query": { "nested" : { "path" : "obj1", "query" : { @@ -58,6 +57,7 @@ GET /my_index/_search } } ---- +// TEST[continued] [[nested-top-level-params]] ==== Top-level parameters for `nested` @@ -78,6 +78,8 @@ such as `obj1.name`. Multi-level nesting is automatically supported, and detected, resulting in an inner nested query to automatically match the relevant nesting level, rather than root, if it exists within another nested query. + +See <> for an example. -- `score_mode`:: @@ -114,4 +116,160 @@ If `false`, {es} returns an error if the `path` is an unmapped field. You can use this parameter to query multiple indices that may not contain the field `path`. --- \ No newline at end of file +-- + +[[nested-query-notes]] +==== Notes + +[[multi-level-nested-query-ex]] +===== Multi-level nested queries + +To see how multi-level nested queries work, +first you need an index that has nested fields. +The following request defines mappings for the `drivers` index +with nested `make` and `model` fields. + +[source,console] +---- +PUT /drivers +{ + "mappings" : { + "properties" : { + "driver" : { + "type" : "nested", + "properties" : { + "last_name" : { + "type" : "text" + }, + "vehicle" : { + "type" : "nested", + "properties" : { + "make" : { + "type" : "text" + }, + "model" : { + "type" : "text" + } + } + } + } + } + } + } +} +---- + +Next, index some documents to the `drivers` index. 
+ +[source,console] +---- +PUT /drivers/_doc/1 +{ + "driver" : { + "last_name" : "McQueen", + "vehicle" : [ + { + "make" : "Powell Motors", + "model" : "Canyonero" + }, + { + "make" : "Miller-Meteor", + "model" : "Ecto-1" + } + ] + } +} + +PUT /drivers/_doc/2?refresh +{ + "driver" : { + "last_name" : "Hudson", + "vehicle" : [ + { + "make" : "Mifune", + "model" : "Mach Five" + }, + { + "make" : "Miller-Meteor", + "model" : "Ecto-1" + } + ] + } +} +---- +// TEST[continued] + +You can now use a multi-level nested query +to match documents based on the `make` and `model` fields. + +[source,console] +---- +GET /drivers/_search +{ + "query" : { + "nested" : { + "path" : "driver", + "query" : { + "nested" : { + "path" : "driver.vehicle", + "query" : { + "bool" : { + "must" : [ + { "match" : { "driver.vehicle.make" : "Powell Motors" } }, + { "match" : { "driver.vehicle.model" : "Canyonero" } } + ] + } + } + } + } + } + } +} +---- +// TEST[continued] + +The search request returns the following response: + +[source,console-result] +---- +{ + "took" : 5, + "timed_out" : false, + "_shards" : { + "total" : 1, + "successful" : 1, + "skipped" : 0, + "failed" : 0 + }, + "hits" : { + "total" : { + "value" : 1, + "relation" : "eq" + }, + "max_score" : 3.7349272, + "hits" : [ + { + "_index" : "drivers", + "_id" : "1", + "_score" : 3.7349272, + "_source" : { + "driver" : { + "last_name" : "McQueen", + "vehicle" : [ + { + "make" : "Powell Motors", + "model" : "Canyonero" + }, + { + "make" : "Miller-Meteor", + "model" : "Ecto-1" + } + ] + } + } + } + ] + } +} +---- +// TESTRESPONSE[s/"took" : 5/"took": $body.took/] diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index a1b2cf7ff5d3..e9b919a1cb58 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -95,7 +95,6 @@ The above request will yield the following response: "hits": [ { <1> "_index": "my-index", - "_type": "_doc", "_id": "1", "_score": 0.26152915, "_source": { @@ -244,7 +243,6 @@ GET /my-index/_search "hits": [ { "_index": "my-index", - "_type": "_doc", "_id": "1", "_score": 0.7093853, "_source": { @@ -420,7 +418,6 @@ This will yield the following response. "hits": [ { "_index": "my-index", - "_type": "_doc", "_id": "3", "_score": 0.26152915, "_source": { @@ -441,7 +438,6 @@ This will yield the following response. }, { "_index": "my-index", - "_type": "_doc", "_id": "4", "_score": 0.26152915, "_source": { @@ -527,7 +523,6 @@ The slightly different response: "hits": [ { "_index": "my-index", - "_type": "_doc", "_id": "1", "_score": 0.7093853, "_source": { @@ -625,7 +620,6 @@ The above search request returns a response similar to this: "hits": [ { "_index": "my-index", - "_type": "_doc", "_id": "1", "_score": 0.26152915, "_source": { diff --git a/docs/reference/query-dsl/term-query.asciidoc b/docs/reference/query-dsl/term-query.asciidoc index c838c83e6396..9cd281ceb60e 100644 --- a/docs/reference/query-dsl/term-query.asciidoc +++ b/docs/reference/query-dsl/term-query.asciidoc @@ -202,7 +202,6 @@ in the results. "hits" : [ { "_index" : "my_index", - "_type" : "_doc", "_id" : "1", "_score" : 0.8630463, "_source" : { diff --git a/docs/reference/query-dsl/terms-query.asciidoc b/docs/reference/query-dsl/terms-query.asciidoc index 87b61906907f..85605457ea31 100644 --- a/docs/reference/query-dsl/terms-query.asciidoc +++ b/docs/reference/query-dsl/terms-query.asciidoc @@ -222,7 +222,6 @@ field, {es} returns both documents. 
"hits" : [ { "_index" : "my_index", - "_type" : "_doc", "_id" : "1", "_score" : 1.0, "_source" : { @@ -234,7 +233,6 @@ field, {es} returns both documents. }, { "_index" : "my_index", - "_type" : "_doc", "_id" : "2", "_score" : 1.0, "_source" : { diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 7899b6693cad..baf0b5e820f7 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -616,6 +616,12 @@ The `common` terms query is deprecated. Use the <> instead. The `match` query skips blocks of documents efficiently, without any configuration, if the total number of hits is not tracked. +[role="exclude",id="indices-types-exists"] +=== Types Exists + +The types exists endpoint has been removed. See <> for +more details. + [role="exclude",id="xpack-api"] === X-Pack APIs @@ -902,4 +908,15 @@ See <>. [role="exclude",id="data-frame-transform-pivot"] ==== Pivot objects -See <>. \ No newline at end of file +See <>. + +[role="exclude",id="configuring-monitoring"] +=== Configuring monitoring + +See <>. + +[role="exclude",id="es-monitoring"] +=== Monitoring {es} + +See <>. + diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 44e1b0d46430..448a07324fd7 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -10,6 +10,13 @@ tag::aliases[] index. See <>. end::aliases[] +tag::target-index-aliases[] +`aliases`:: +(Optional, <>) +Index aliases which include the target index. +See <>. +end::target-index-aliases[] + tag::allow-no-indices[] `allow_no_indices`:: (Optional, boolean) If `true`, @@ -383,6 +390,12 @@ tag::pipeline[] (Optional, string) ID of the pipeline to use to preprocess incoming documents. end::pipeline[] +tag::path-pipeline[] +``:: +(Optional, string) Comma-separated list or wildcard expression of pipeline IDs +used to limit the request. +end::path-pipeline[] + tag::preference[] `preference`:: (Optional, string) Specifies the node or shard the operation should be @@ -478,15 +491,22 @@ the segment has most likely been written to disk but needs a <> to be searchable. end::segment-search[] +tag::segment-size[] +Disk space used by the segment, such as `50kb`. +end::segment-size[] + tag::settings[] `settings`:: (Optional, <>) Configuration options for the index. See <>. end::settings[] -tag::segment-size[] -Disk space used by the segment, such as `50kb`. -end::segment-size[] +tag::target-index-settings[] +`settings`:: +(Optional, <>) +Configuration options for the target index. +See <>. +end::target-index-settings[] tag::slices[] `slices`:: @@ -523,6 +543,17 @@ tag::stats[] purposes. end::stats[] +tag::target-index[] +``:: ++ +-- +(Required, string) +Name of the target index to create. 
+ +include::{docdir}/indices/create-index.asciidoc[tag=index-name-reqs] +-- +end::target-index[] + tag::terminate_after[] `terminate_after`:: (Optional, integer) The maximum number of documents to collect for each shard, diff --git a/docs/reference/rollup/api-quickref.asciidoc b/docs/reference/rollup/api-quickref.asciidoc index d6be3e4e5b69..8a64d9df17f3 100644 --- a/docs/reference/rollup/api-quickref.asciidoc +++ b/docs/reference/rollup/api-quickref.asciidoc @@ -1,7 +1,10 @@ [role="xpack"] [testenv="basic"] [[rollup-api-quickref]] -== API Quick Reference +=== {rollup-cap} API quick reference +++++ +API quick reference +++++ experimental[] @@ -15,7 +18,7 @@ Most rollup endpoints have the following base: [float] [[rollup-api-jobs]] -=== /job/ +==== /job/ * {ref}/rollup-put-job.html[PUT /_rollup/job/+++]: Create a {rollup-job} * {ref}/rollup-get-job.html[GET /_rollup/job]: List {rollup-jobs} @@ -26,13 +29,13 @@ Most rollup endpoints have the following base: [float] [[rollup-api-data]] -=== /data/ +==== /data/ * {ref}/rollup-get-rollup-caps.html[GET /_rollup/data//_rollup_caps+++]: Get Rollup Capabilities * {ref}/rollup-get-rollup-index-caps.html[GET //_rollup/data/+++]: Get Rollup Index Capabilities [float] [[rollup-api-index]] -=== // +==== // * {ref}/rollup-search.html[GET //_rollup_search]: Search rollup data diff --git a/docs/reference/rollup/index.asciidoc b/docs/reference/rollup/index.asciidoc index 64dc233f82f6..99180e2f32d4 100644 --- a/docs/reference/rollup/index.asciidoc +++ b/docs/reference/rollup/index.asciidoc @@ -1,10 +1,7 @@ [role="xpack"] [testenv="basic"] [[xpack-rollup]] -= Rolling up historical data - -[partintro] --- +== Rolling up historical data experimental[] @@ -12,20 +9,20 @@ Keeping historical data around for analysis is extremely useful but often avoide archiving massive amounts of data. Retention periods are thus driven by financial realities rather than by the usefulness of extensive historical data. -The Rollup feature in {xpack} provides a means to summarize and store historical data so that it can still be used -for analysis, but at a fraction of the storage cost of raw data. +// tag::rollup-intro[] +The {stack} {rollup-features} provide a means to summarize and store historical +data so that it can still be used for analysis, but at a fraction of the storage +cost of raw data. +// end::rollup-intro[] - -* <> -* <> -* <> -* <> +* <> +* <> +* <> +* <> * <> -* <> +* <> --- - include::overview.asciidoc[] include::api-quickref.asciidoc[] include::rollup-getting-started.asciidoc[] diff --git a/docs/reference/rollup/overview.asciidoc b/docs/reference/rollup/overview.asciidoc index 90c5e20a850c..843cd5c05849 100644 --- a/docs/reference/rollup/overview.asciidoc +++ b/docs/reference/rollup/overview.asciidoc @@ -1,7 +1,10 @@ [role="xpack"] [testenv="basic"] [[rollup-overview]] -== Overview +=== {rollup-cap} overview +++++ +Overview +++++ experimental[] @@ -23,7 +26,7 @@ reading often diminishes with time. It's not useless -- it could easily contrib value often leads to deletion rather than paying the fixed storage cost. [float] -=== Rollup store historical data at reduced granularity +==== Rollup stores historical data at reduced granularity That's where Rollup comes into play. The Rollup functionality summarizes old, high-granularity data into a reduced granularity format for long-term storage. By "rolling" the data up into a single summary document, historical data @@ -39,7 +42,7 @@ automates this process of summarizing historical data. 
Details about setting up and configuring Rollup are covered in <> [float] -=== Rollup uses standard query DSL +==== Rollup uses standard query DSL The Rollup feature exposes a new search endpoint (`/_rollup_search` vs the standard `/_search`) which knows how to search over rolled-up data. Importantly, this endpoint accepts 100% normal {es} Query DSL. Your application does not need to learn @@ -53,7 +56,7 @@ But if your queries, aggregations and dashboards only use the available function data is trivial. [float] -=== Rollup merges "live" and "rolled" data +==== Rollup merges "live" and "rolled" data A useful feature of Rollup is the ability to query both "live", realtime data in addition to historical "rolled" data in a single query. @@ -67,7 +70,7 @@ It will take the results from both data sources and merge them together. If the "rolled" data, live data is preferred to increase accuracy. [float] -=== Rollup is multi-interval aware +==== Rollup is multi-interval aware Finally, Rollup is capable of intelligently utilizing the best interval available. If you've worked with summarizing features of other products, you'll find that they can be limiting. If you configure rollups at daily intervals... your diff --git a/docs/reference/rollup/rollup-agg-limitations.asciidoc b/docs/reference/rollup/rollup-agg-limitations.asciidoc index 9f8b6f66adee..6f9f949bf8b6 100644 --- a/docs/reference/rollup/rollup-agg-limitations.asciidoc +++ b/docs/reference/rollup/rollup-agg-limitations.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[rollup-agg-limitations]] -== Rollup Aggregation Limitations +=== {rollup-cap} aggregation limitations experimental[] @@ -9,7 +9,7 @@ There are some limitations to how fields can be rolled up / aggregated. This pa you are aware of them. [float] -=== Limited aggregation components +==== Limited aggregation components The Rollup functionality allows fields to be grouped with the following aggregations: diff --git a/docs/reference/rollup/rollup-getting-started.asciidoc b/docs/reference/rollup/rollup-getting-started.asciidoc index 27f9d9cd4060..3b57e968a9e5 100644 --- a/docs/reference/rollup/rollup-getting-started.asciidoc +++ b/docs/reference/rollup/rollup-getting-started.asciidoc @@ -1,7 +1,10 @@ [role="xpack"] [testenv="basic"] [[rollup-getting-started]] -== Getting Started +=== Getting started with {rollups} +++++ +Getting started +++++ experimental[] @@ -23,7 +26,7 @@ look like this: // NOTCONSOLE [float] -=== Creating a Rollup Job +==== Creating a rollup job We'd like to rollup these documents into hourly summaries, which will allow us to generate reports and dashboards with any time interval one hour or greater. A rollup job might look like this: @@ -103,7 +106,7 @@ After you execute the above command and create the job, you'll receive the follo ---- [float] -=== Starting the job +==== Starting the job After the job is created, it will be sitting in an inactive state. Jobs need to be started before they begin processing data (this allows you to stop them later as a way to temporarily pause, without deleting the configuration). @@ -117,7 +120,7 @@ POST _rollup/job/sensor/_start // TEST[setup:sensor_rollup_job] [float] -=== Searching the Rolled results +==== Searching the rolled results After the job has run and processed some data, we can use the <> endpoint to do some searching. The Rollup feature is designed so that you can use the same Query DSL syntax that you are accustomed to... it just happens to run on the rolled up data instead. 
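Continuing with the `sensor` job started above, a sketch of what such a search can look like; the `sensor_rollup` target index and the `temperature` field are assumptions based on this quickstart's setup rather than shown in the excerpt:

[source,console]
----
GET /sensor_rollup/_rollup_search
{
  "size": 0,
  "aggregations": {
    "max_temperature": {
      "max": {
        "field": "temperature"
      }
    }
  }
}
----

Apart from the `_rollup_search` endpoint, the body is ordinary Query DSL, which is the point being made above.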
@@ -292,7 +295,7 @@ In addition to being more complicated (date histogram and a terms aggregation, p the date_histogram uses a `7d` interval instead of `60m`. [float] -=== Conclusion +==== Conclusion This quickstart should have provided a concise overview of the core functionality that Rollup exposes. There are more tips and things to consider when setting up Rollups, which you can find throughout the rest of this section. You may also explore the <> diff --git a/docs/reference/rollup/rollup-search-limitations.asciidoc b/docs/reference/rollup/rollup-search-limitations.asciidoc index d55787f3cec5..f6315e12a300 100644 --- a/docs/reference/rollup/rollup-search-limitations.asciidoc +++ b/docs/reference/rollup/rollup-search-limitations.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[rollup-search-limitations]] -== Rollup Search Limitations +=== {rollup-cap} search limitations experimental[] @@ -11,7 +11,7 @@ live data is thrown away, you will always lose some flexibility. This page highlights the major limitations so that you are aware of them. [float] -=== Only one Rollup index per search +==== Only one {rollup} index per search When using the <> endpoint, the `index` parameter accepts one or more indices. These can be a mix of regular, non-rollup indices and rollup indices. However, only one rollup index can be specified. The exact list of rules for the `index` parameter are as @@ -33,7 +33,7 @@ may be able to open this up to multiple rollup jobs. [float] [[aggregate-stored-only]] -=== Can only aggregate what's been stored +==== Can only aggregate what's been stored A perhaps obvious limitation, but rollups can only aggregate on data that has been stored in the rollups. If you don't configure the rollup job to store metrics about the `price` field, you won't be able to use the `price` field in any query or aggregation. @@ -81,7 +81,7 @@ The response will tell you that the field and aggregation were not possible, bec // TESTRESPONSE[s/"stack_trace": \.\.\./"stack_trace": $body.$_path/] [float] -=== Interval Granularity +==== Interval granularity Rollups are stored at a certain granularity, as defined by the `date_histogram` group in the configuration. This means you can only search/aggregate the rollup data with an interval that is greater-than or equal to the configured rollup interval. @@ -111,7 +111,7 @@ That said, if multiple jobs are present in a single rollup index with varying in with the largest interval to satisfy the search request. [float] -=== Limited querying components +==== Limited querying components The Rollup functionality allows `query`'s in the search request, but with a limited subset of components. The queries currently allowed are: @@ -128,7 +128,7 @@ If you attempt to use an unsupported query, or the query references a field that thrown. We expect the list of support queries to grow over time as more are implemented. [float] -=== Timezones +==== Timezones Rollup documents are stored in the timezone of the `date_histogram` group configuration in the job. If no timezone is specified, the default is to rollup timestamps in `UTC`. 
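To illustrate the interval and timezone rules just described: against a job rolled up at hourly intervals in `UTC`, a coarser `date_histogram` succeeds while a finer one is rejected. A sketch, with the index and field names carried over as assumptions from the quickstart:

[source,console]
----
GET /sensor_rollup/_rollup_search
{
  "size": 0,
  "aggregations": {
    "timeline": {
      "date_histogram": {
        "field": "timestamp",
        "fixed_interval": "7d" <1>
      }
    }
  }
}
----
<1> `7d` is greater than or equal to an hourly rollup interval, so the request can be satisfied; a `30m` interval here would return an error.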
diff --git a/docs/reference/rollup/understanding-groups.asciidoc b/docs/reference/rollup/understanding-groups.asciidoc index a59c19fbf5cc..eb1b47e8a16d 100644 --- a/docs/reference/rollup/understanding-groups.asciidoc +++ b/docs/reference/rollup/understanding-groups.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[rollup-understanding-groups]] -== Understanding Groups +=== Understanding groups experimental[] @@ -121,7 +121,7 @@ Ultimately, when configuring `groups` for a job, think in terms of how you might then include those in the config. Because Rollup Search allows any order or combination of the grouped fields, you just need to decide if a field is useful for aggregating later, and how you might wish to use it (terms, histogram, etc) -=== Grouping Limitations with heterogeneous indices +==== Grouping limitations with heterogeneous indices There was previously a limitation in how Rollup could handle indices that had heterogeneous mappings (multiple, unrelated/non-overlapping mappings). The recommendation at the time was to configure a separate job per data "type". For example, you might configure a separate @@ -192,7 +192,7 @@ PUT _rollup/job/combined -------------------------------------------------- // NOTCONSOLE -=== Doc counts and overlapping jobs +==== Doc counts and overlapping jobs There was previously an issue with document counts on "overlapping" job configurations, driven by the same internal implementation detail. If there were two Rollup jobs saving to the same index, where one job is a "subset" of another job, it was possible that document counts diff --git a/docs/reference/search/explain.asciidoc b/docs/reference/search/explain.asciidoc index 1b5d280559c4..a9d431e70228 100644 --- a/docs/reference/search/explain.asciidoc +++ b/docs/reference/search/explain.asciidoc @@ -103,7 +103,6 @@ The API returns the following response: -------------------------------------------------- { "_index":"twitter", - "_type":"_doc", "_id":"0", "matched":true, "explanation":{ diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 0d83d2930521..0b959f87e0e8 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1,20 +1,30 @@ [[search-profile]] === Profile API -WARNING: The Profile API is a debugging tool and adds significant overhead to search execution. +WARNING: The Profile API is a debugging tool and adds significant overhead to search execution. -The Profile API provides detailed timing information about the execution of individual components -in a search request. It gives the user insight into how search requests are executed at a low level so that -the user can understand why certain requests are slow, and take steps to improve them. -Note that the Profile API, <>, doesn't measure -network latency, time spent in the search fetch phase, time spent while the requests spends -in queues or while merging shard responses on the coordinating node. +Provides detailed timing information about the execution of individual +components in a search request. -The output from the Profile API is *very* verbose, especially for complicated requests executed across -many shards. 
Pretty-printing the response is recommended to help understand the output -[float] -==== Usage +[[search-profile-api-desc]] +==== {api-description-title} + +The Profile API gives the user insight into how search requests are executed at +a low level so that the user can understand why certain requests are slow, and +take steps to improve them. Note that the Profile API, +<>, doesn't measure network latency, +time spent in the search fetch phase, time spent while the requests spends in +queues or while merging shard responses on the coordinating node. + +The output from the Profile API is *very* verbose, especially for complicated +requests executed across many shards. Pretty-printing the response is +recommended to help understand the output. + + +[[search-profile-api-example]] +==== {api-examples-title} + Any `_search` request can be profiled by adding a top-level `profile` parameter: @@ -31,9 +41,10 @@ GET /twitter/_search // TEST[setup:twitter] <1> Setting the top-level `profile` parameter to `true` will enable profiling -for the search +for the search. -This will yield the following result: + +The API returns the following result: [source,console-result] -------------------------------------------------- @@ -167,12 +178,13 @@ This will yield the following result: // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/\[2aE02wS1R8q_QFnYu6vDVQ\]\[twitter\]\[0\]/$body.$_path/] -<1> Search results are returned, but were omitted here for brevity +<1> Search results are returned, but were omitted here for brevity. -Even for a simple query, the response is relatively complicated. Let's break it down piece-by-piece before moving -to more complex examples. +Even for a simple query, the response is relatively complicated. Let's break it +down piece-by-piece before moving to more complex examples. -First, the overall structure of the profile response is as follows: + +The overall structure of the profile response is as follows: [source,console-result] -------------------------------------------------- @@ -200,27 +212,37 @@ First, the overall structure of the profile response is as follows: // TESTRESPONSE[s/"query": \[...\]/"query": $body.$_path/] // TESTRESPONSE[s/"collector": \[...\]/"collector": $body.$_path/] // TESTRESPONSE[s/"aggregations": \[...\]/"aggregations": []/] -<1> A profile is returned for each shard that participated in the response, and is identified -by a unique ID -<2> Each profile contains a section which holds details about the query execution -<3> Each profile has a single time representing the cumulative rewrite time -<4> Each profile also contains a section about the Lucene Collectors which run the search -<5> Each profile contains a section which holds the details about the aggregation execution +<1> A profile is returned for each shard that participated in the response, and +is identified by a unique ID. +<2> Each profile contains a section which holds details about the query +execution. +<3> Each profile has a single time representing the cumulative rewrite time. +<4> Each profile also contains a section about the Lucene Collectors which run +the search. +<5> Each profile contains a section which holds the details about the +aggregation execution. -Because a search request may be executed against one or more shards in an index, and a search may cover -one or more indices, the top level element in the profile response is an array of `shard` objects. -Each shard object lists its `id` which uniquely identifies the shard. 
The ID's format is +Because a search request may be executed against one or more shards in an index, +and a search may cover one or more indices, the top level element in the profile +response is an array of `shard` objects. Each shard object lists its `id` which +uniquely identifies the shard. The ID's format is `[nodeID][indexName][shardID]`. -The profile itself may consist of one or more "searches", where a search is a query executed against the underlying -Lucene index. Most search requests submitted by the user will only execute a single `search` against the Lucene index. -But occasionally multiple searches will be executed, such as including a global aggregation (which needs to execute -a secondary "match_all" query for the global context). +The profile itself may consist of one or more "searches", where a search is a +query executed against the underlying Lucene index. Most search requests +submitted by the user will only execute a single `search` against the Lucene +index. But occasionally multiple searches will be executed, such as including a +global aggregation (which needs to execute a secondary "match_all" query for the +global context). Inside each `search` object there will be two arrays of profiled information: -a `query` array and a `collector` array. Alongside the `search` object is an `aggregations` object that contains the profile information for the aggregations. In the future, more sections may be added, such as `suggest`, `highlight`, etc. +a `query` array and a `collector` array. Alongside the `search` object is an +`aggregations` object that contains the profile information for the +aggregations. In the future, more sections may be added, such as `suggest`, +`highlight`, etc. -There will also be a `rewrite` metric showing the total time spent rewriting the query (in nanoseconds). +There will also be a `rewrite` metric showing the total time spent rewriting the +query (in nanoseconds). NOTE: As with other statistics apis, the Profile API supports human readable outputs. This can be turned on by adding `?human=true` to the query string. In this case, the output contains the additional `time` field containing rounded, @@ -245,10 +267,11 @@ the `advance` phase of that query is the cause, for example. [[query-section]] ===== `query` Section -The `query` section contains detailed timing of the query tree executed by Lucene on a particular shard. -The overall structure of this query tree will resemble your original Elasticsearch query, but may be slightly -(or sometimes very) different. It will also use similar but not always identical naming. Using our previous -`match` query example, let's analyze the `query` section: +The `query` section contains detailed timing of the query tree executed by +Lucene on a particular shard. The overall structure of this query tree will +resemble your original Elasticsearch query, but may be slightly (or sometimes +very) different. It will also use similar but not always identical naming. 
+Using our previous `match` query example, let's analyze the `query` section: [source,console-result] -------------------------------------------------- @@ -279,25 +302,30 @@ The overall structure of this query tree will resemble your original Elasticsear // TESTRESPONSE[s/]$/],"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"breakdown": \{...\}/"breakdown": $body.$_path/] -<1> The breakdown timings are omitted for simplicity +<1> The breakdown timings are omitted for simplicity. -Based on the profile structure, we can see that our `match` query was rewritten by Lucene into a BooleanQuery with two -clauses (both holding a TermQuery). The `type` field displays the Lucene class name, and often aligns with -the equivalent name in Elasticsearch. The `description` field displays the Lucene explanation text for the query, and -is made available to help differentiating between parts of your query (e.g. both `message:search` and `message:test` -are TermQuery's and would appear identical otherwise. +Based on the profile structure, we can see that our `match` query was rewritten +by Lucene into a BooleanQuery with two clauses (both holding a TermQuery). The +`type` field displays the Lucene class name, and often aligns with the +equivalent name in Elasticsearch. The `description` field displays the Lucene +explanation text for the query, and is made available to help differentiating +between parts of your query (e.g. both `message:search` and `message:test` are +TermQuery's and would appear identical otherwise. -The `time_in_nanos` field shows that this query took ~1.8ms for the entire BooleanQuery to execute. The recorded time is inclusive -of all children. +The `time_in_nanos` field shows that this query took ~1.8ms for the entire +BooleanQuery to execute. The recorded time is inclusive of all children. -The `breakdown` field will give detailed stats about how the time was spent, we'll look at -that in a moment. Finally, the `children` array lists any sub-queries that may be present. Because we searched for two -values ("search test"), our BooleanQuery holds two children TermQueries. They have identical information (type, time, -breakdown, etc). Children are allowed to have their own children. +The `breakdown` field will give detailed stats about how the time was spent, +we'll look at that in a moment. Finally, the `children` array lists any +sub-queries that may be present. Because we searched for two values ("search +test"), our BooleanQuery holds two children TermQueries. They have identical +information (type, time, breakdown, etc). Children are allowed to have their +own children. -====== Timing Breakdown +===== Timing Breakdown -The `breakdown` component lists detailed timing statistics about low-level Lucene execution: +The `breakdown` component lists detailed timing statistics about low-level +Lucene execution: [source,console-result] -------------------------------------------------- @@ -326,10 +354,12 @@ The `breakdown` component lists detailed timing statistics about low-level Lucen // TESTRESPONSE[s/}$/},\n"children": $body.$_path}],\n"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] -Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall -`time_in_nanos` apply here. 
The intention of the breakdown is to give you a feel for A) what machinery in Lucene is -actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time, -the breakdown is inclusive of all children times. +Timings are listed in wall-clock nanoseconds and are not normalized at all. All +caveats about the overall `time_in_nanos` apply here. The intention of the +breakdown is to give you a feel for A) what machinery in Lucene is actually +eating time, and B) the magnitude of differences in times between the various +components. Like the overall time, the breakdown is inclusive of all children +times. The meaning of the stats are as follows: @@ -399,13 +429,15 @@ The meaning of the stats are as follows: means the `nextDoc()` method was called on two different documents. This can be used to help judge how selective queries are, by comparing counts between different query components. + [[collectors-section]] ===== `collectors` Section -The Collectors portion of the response shows high-level execution details. Lucene works by defining a "Collector" -which is responsible for coordinating the traversal, scoring, and collection of matching documents. Collectors -are also how a single query can record aggregation results, execute unscoped "global" queries, execute post-query -filters, etc. +The Collectors portion of the response shows high-level execution details. +Lucene works by defining a "Collector" which is responsible for coordinating the +traversal, scoring, and collection of matching documents. Collectors are also +how a single query can record aggregation results, execute unscoped "global" +queries, execute post-query filters, etc. Looking at the previous example: @@ -430,15 +462,20 @@ Looking at the previous example: // TESTRESPONSE[s/]$/]}], "aggregations": []}]}}/] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] -We see a single collector named `SimpleTopScoreDocCollector` wrapped into `CancellableCollector`. `SimpleTopScoreDocCollector` is the default "scoring and sorting" -`Collector` used by Elasticsearch. The `reason` field attempts to give a plain English description of the class name. The -`time_in_nanos` is similar to the time in the Query tree: a wall-clock time inclusive of all children. Similarly, `children` lists -all sub-collectors. The `CancellableCollector` that wraps `SimpleTopScoreDocCollector` is used by Elasticsearch to detect if the current -search was cancelled and stop collecting documents as soon as it occurs. -It should be noted that Collector times are **independent** from the Query times. They are calculated, combined, -and normalized independently! Due to the nature of Lucene's execution, it is impossible to "merge" the times -from the Collectors into the Query section, so they are displayed in separate portions. +We see a single collector named `SimpleTopScoreDocCollector` wrapped into +`CancellableCollector`. `SimpleTopScoreDocCollector` is the default "scoring and +sorting" `Collector` used by {es}. The `reason` field attempts to give a plain +English description of the class name. The `time_in_nanos` is similar to the +time in the Query tree: a wall-clock time inclusive of all children. Similarly, +`children` lists all sub-collectors. The `CancellableCollector` that wraps +`SimpleTopScoreDocCollector` is used by {es} to detect if the current search was +cancelled and stop collecting documents as soon as it occurs. 
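To make the `Collector` abstraction more concrete, here is a minimal sketch of
a custom Lucene collector (written against the Lucene 8.x API; it is purely
illustrative and is not how {es} implements its own collectors):

[source,java]
--------------------------------------------------
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.SimpleCollector;

// Illustrative only: a Collector that simply counts matching documents.
public final class CountingCollector extends SimpleCollector {
    private int count = 0;

    @Override
    public void collect(int doc) {
        // Lucene calls this once for every document that matches the query.
        count++;
    }

    @Override
    public ScoreMode scoreMode() {
        // Counting does not need scores, so Lucene can skip scoring work.
        return ScoreMode.COMPLETE_NO_SCORES;
    }

    public int count() {
        return count;
    }
}
--------------------------------------------------

Roughly speaking, the `time_in_nanos` reported for a profiled collector covers
the time spent inside callbacks of this shape.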
+ +It should be noted that Collector times are **independent** from the Query +times. They are calculated, combined, and normalized independently! Due to the +nature of Lucene's execution, it is impossible to "merge" the times from the +Collectors into the Query section, so they are displayed in separate portions. For reference, the various collector reasons are: @@ -489,20 +526,22 @@ For reference, the various collector reasons are: [[rewrite-section]] ===== `rewrite` Section -All queries in Lucene undergo a "rewriting" process. A query (and its sub-queries) may be rewritten one or -more times, and the process continues until the query stops changing. This process allows Lucene to perform -optimizations, such as removing redundant clauses, replacing one query for a more efficient execution path, -etc. For example a Boolean -> Boolean -> TermQuery can be rewritten to a TermQuery, because all the Booleans -are unnecessary in this case. +All queries in Lucene undergo a "rewriting" process. A query (and its +sub-queries) may be rewritten one or more times, and the process continues until +the query stops changing. This process allows Lucene to perform optimizations, +such as removing redundant clauses, replacing one query for a more efficient +execution path, etc. For example a Boolean -> Boolean -> TermQuery can be +rewritten to a TermQuery, because all the Booleans are unnecessary in this case. -The rewriting process is complex and difficult to display, since queries can change drastically. Rather than -showing the intermediate results, the total rewrite time is simply displayed as a value (in nanoseconds). This -value is cumulative and contains the total time for all queries being rewritten. +The rewriting process is complex and difficult to display, since queries can +change drastically. Rather than showing the intermediate results, the total +rewrite time is simply displayed as a value (in nanoseconds). This value is +cumulative and contains the total time for all queries being rewritten. ===== A more complex example - -To demonstrate a slightly more complex query and the associated results, we can profile the following query: +To demonstrate a slightly more complex query and the associated results, we can +profile the following query: [source,console] -------------------------------------------------- @@ -550,7 +589,8 @@ This example has: - A global aggregation - A post_filter -And the response: + +The API returns the following result: [source,console-result] -------------------------------------------------- @@ -660,47 +700,58 @@ And the response: // TESTRESPONSE[s/\.\.\.//] // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"id": "\[P6-vulHtQRWuD4YnubWb7A\]\[test\]\[0\]"/"id": $body.profile.shards.0.id/] -<1> The `"aggregations"` portion has been omitted because it will be covered in the next section +<1> The `"aggregations"` portion has been omitted because it will be covered in +the next section. -As you can see, the output is significantly more verbose than before. All the major portions of the query are -represented: +As you can see, the output is significantly more verbose than before. All the +major portions of the query are represented: -1. The first `TermQuery` (user:test) represents the main `term` query -2. The second `TermQuery` (message:some) represents the `post_filter` query +1. The first `TermQuery` (user:test) represents the main `term` query. +2. The second `TermQuery` (message:some) represents the `post_filter` query. 
-The Collector tree is fairly straightforward, showing how a single CancellableCollector wraps a MultiCollector
- which also wraps a FilteredCollector to execute the post_filter (and in turn wraps the normal scoring SimpleCollector),
- a BucketCollector to run all scoped aggregations.
+The Collector tree is fairly straightforward, showing how a single
+CancellableCollector wraps a MultiCollector which also wraps a FilteredCollector
+to execute the post_filter (and in turn wraps the normal scoring
+SimpleCollector), and a BucketCollector to run all scoped aggregations.

 ===== Understanding MultiTermQuery output

-A special note needs to be made about the `MultiTermQuery` class of queries. This includes wildcards, regex, and fuzzy
-queries. These queries emit very verbose responses, and are not overly structured.
+A special note needs to be made about the `MultiTermQuery` class of queries.
+This includes wildcards, regex, and fuzzy queries. These queries emit very
+verbose responses, and are not overly structured.

-Essentially, these queries rewrite themselves on a per-segment basis. If you imagine the wildcard query `b*`, it technically
-can match any token that begins with the letter "b". It would be impossible to enumerate all possible combinations,
-so Lucene rewrites the query in context of the segment being evaluated, e.g., one segment may contain the tokens
-`[bar, baz]`, so the query rewrites to a BooleanQuery combination of "bar" and "baz". Another segment may only have the
-token `[bakery]`, so the query rewrites to a single TermQuery for "bakery".
+Essentially, these queries rewrite themselves on a per-segment basis. If you
+imagine the wildcard query `b*`, it technically can match any token that begins
+with the letter "b". It would be impossible to enumerate all possible
+combinations, so Lucene rewrites the query in the context of the segment being
+evaluated, e.g., one segment may contain the tokens `[bar, baz]`, so the query
+rewrites to a BooleanQuery combination of "bar" and "baz". Another segment may
+only have the token `[bakery]`, so the query rewrites to a single TermQuery for
+"bakery".

-Due to this dynamic, per-segment rewriting, the clean tree structure becomes distorted and no longer follows a clean
-"lineage" showing how one query rewrites into the next. At present time, all we can do is apologize, and suggest you
-collapse the details for that query's children if it is too confusing. Luckily, all the timing statistics are correct,
-just not the physical layout in the response, so it is sufficient to just analyze the top-level MultiTermQuery and
+Due to this dynamic, per-segment rewriting, the clean tree structure becomes
+distorted and no longer follows a clean "lineage" showing how one query rewrites
+into the next. At present, all we can do is apologize, and suggest you
+collapse the details for that query's children if it is too confusing. Luckily,
+all the timing statistics are correct, just not the physical layout in the
+response, so it is sufficient to just analyze the top-level MultiTermQuery and
 ignore its children if you find the details too tricky to interpret.

-Hopefully this will be fixed in future iterations, but it is a tricky problem to solve and still in-progress :)
+Hopefully this will be fixed in future iterations, but it is a tricky problem to
+solve and still in-progress.
:) [[profiling-aggregations]] -==== Profiling Aggregations +===== Profiling Aggregations + [[agg-section]] -===== `aggregations` Section +====== `aggregations` Section -The `aggregations` section contains detailed timing of the aggregation tree executed by a particular shard. -The overall structure of this aggregation tree will resemble your original Elasticsearch request. Let's -execute the previous query again and look at the aggregation profile this time: +The `aggregations` section contains detailed timing of the aggregation tree +executed by a particular shard. The overall structure of this aggregation tree +will resemble your original {es} request. Let's execute the previous query again +and look at the aggregation profile this time: [source,console] -------------------------------------------------- @@ -741,6 +792,7 @@ GET /twitter/_search // TEST[s/_search/_search\?filter_path=profile.shards.aggregations/] // TEST[continued] + This yields the following aggregation profile output: [source,console-result] @@ -807,16 +859,20 @@ This yields the following aggregation profile output: // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/] // TESTRESPONSE[s/"id": "\[P6-vulHtQRWuD4YnubWb7A\]\[test\]\[0\]"/"id": $body.profile.shards.0.id/] -From the profile structure we can see that the `my_scoped_agg` is internally being run as a `LongTermsAggregator` (because the field it is -aggregating, `likes`, is a numeric field). At the same level, we see a `GlobalAggregator` which comes from `my_global_agg`. That -aggregation then has a child `LongTermsAggregator` which comes from the second term's aggregation on `likes`. +From the profile structure we can see that the `my_scoped_agg` is internally +being run as a `LongTermsAggregator` (because the field it is aggregating, +`likes`, is a numeric field). At the same level, we see a `GlobalAggregator` +which comes from `my_global_agg`. That aggregation then has a child +`LongTermsAggregator` which comes from the second term's aggregation on `likes`. -The `time_in_nanos` field shows the time executed by each aggregation, and is inclusive of all children. While the overall time is useful, -the `breakdown` field will give detailed stats about how the time was spent. +The `time_in_nanos` field shows the time executed by each aggregation, and is +inclusive of all children. While the overall time is useful, the `breakdown` +field will give detailed stats about how the time was spent. -====== Timing Breakdown +===== Timing Breakdown -The `breakdown` component lists detailed timing statistics about low-level Lucene execution: +The `breakdown` component lists detailed timing statistics about low-level +Lucene execution: [source,js] -------------------------------------------------- @@ -834,10 +890,11 @@ The `breakdown` component lists detailed timing statistics about low-level Lucen -------------------------------------------------- // NOTCONSOLE -Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall -`time` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Elasticsearch is -actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time, -the breakdown is inclusive of all children times. +Timings are listed in wall-clock nanoseconds and are not normalized at all. All +caveats about the overall `time` apply here. 
The intention of the breakdown is
+to give you a feel for A) what machinery in {es} is actually eating time, and B)
+the magnitude of differences in times between the various components. Like the
+overall time, the breakdown is inclusive of all children times.

 The meaning of the stats are as follows:

@@ -866,28 +923,33 @@ The meaning of the stats are as follows:
 means the `collect()` method was called on two different documents.

 [[profiling-considerations]]
-==== Profiling Considerations
+===== Profiling Considerations

-===== Performance Notes
+Like any profiler, the Profile API introduces a non-negligible overhead to
+search execution. The act of instrumenting low-level method calls such as
+`collect`, `advance`, and `next_doc` can be fairly expensive, since these
+methods are called in tight loops. Therefore, profiling should not be enabled
+in production settings by default, and should not be compared against
+non-profiled query times. Profiling is just a diagnostic tool.

-Like any profiler, the Profile API introduces a non-negligible overhead to search execution. The act of instrumenting
-low-level method calls such as `collect`, `advance`, and `next_doc` can be fairly expensive, since these methods are called
-in tight loops. Therefore, profiling should not be enabled in production settings by default, and should not
-be compared against non-profiled query times. Profiling is just a diagnostic tool.
-
-There are also cases where special Lucene optimizations are disabled, since they are not amenable to profiling. This
-could cause some queries to report larger relative times than their non-profiled counterparts, but in general should
-not have a drastic effect compared to other components in the profiled query.
+There are also cases where special Lucene optimizations are disabled, since they
+are not amenable to profiling. This could cause some queries to report larger
+relative times than their non-profiled counterparts, but in general should not
+have a drastic effect compared to other components in the profiled query.

 [[profile-limitations]]
 ===== Limitations

-- Profiling currently does not measure the search fetch phase nor the network overhead
-- Profiling also does not account for time spent in the queue, merging shard responses on the coordinating node, or
-additional work such as building global ordinals (an internal data structure used to speed up search)
-- Profiling statistics are currently not available for suggestions, highlighting, `dfs_query_then_fetch`
-- Profiling of the reduce phase of aggregation is currently not available
-- The Profiler is still highly experimental. The Profiler is instrumenting parts of Lucene that were
-never designed to be exposed in this manner, and so all results should be viewed as a best effort to provide detailed
-diagnostics. We hope to improve this over time. If you find obviously wrong numbers, strange query structures, or
-other bugs, please report them!
+- Profiling currently does not measure the search fetch phase or the network
+overhead.
+- Profiling also does not account for time spent in the queue, merging shard
+responses on the coordinating node, or additional work such as building global
+ordinals (an internal data structure used to speed up search).
+- Profiling statistics are currently not available for suggestions,
+highlighting, or `dfs_query_then_fetch`.
+- Profiling of the reduce phase of aggregation is currently not available.
+- The Profiler is still highly experimental.
The Profiler is instrumenting parts
+of Lucene that were never designed to be exposed in this manner, and so all
+results should be viewed as a best effort to provide detailed diagnostics. We
+hope to improve this over time. If you find obviously wrong numbers, strange
+query structures, or other bugs, please report them!
diff --git a/docs/reference/search/rank-eval.asciidoc b/docs/reference/search/rank-eval.asciidoc
index a8a9c0a8ac09..14c9e54bb262 100644
--- a/docs/reference/search/rank-eval.asciidoc
+++ b/docs/reference/search/rank-eval.asciidoc
@@ -3,35 +3,87 @@
 experimental["The ranking evaluation API is experimental and may be changed or removed completely in a future release, as well as change in non-backwards compatible ways on minor versions updates. Elastic will take a best effort approach to fix any issues, but experimental features are not subject to the support SLA of official GA features."]

-The ranking evaluation API allows to evaluate the quality of ranked search
+Allows you to evaluate the quality of ranked search results over a set of
+typical search queries.
+
+
+[[search-rank-eval-api-request]]
+==== {api-request-title}
+
+`GET /<index>/_rank_eval`
+
+`POST /<index>/_rank_eval`
+
+
+[[search-rank-eval-api-desc]]
+==== {api-description-title}
+
+The ranking evaluation API allows you to evaluate the quality of ranked search
 results over a set of typical search queries. Given this set of queries and a
 list of manually rated documents, the `_rank_eval` endpoint calculates and
 returns typical information retrieval metrics like _mean reciprocal rank_,
 _precision_ or _discounted cumulative gain_.

-[float]
-==== Overview
+Search quality evaluation starts with looking at the users of your search
+application, and the things that they are searching for. Users have a specific
+_information need_, for example they are looking for a gift in a web shop or
+want to book a flight for their next holiday. They usually enter some search
+terms into a search box or some other web form. All of this information,
+together with meta information about the user (for example the browser,
+location, earlier preferences and so on) then gets translated into a query to
+the underlying search system.

-Search quality evaluation starts with looking at the users of your search application, and the things that they are searching for.
-Users have a specific _information need_, e.g. they are looking for gift in a web shop or want to book a flight for their next holiday.
-They usually enter some search terms into a search box or some other web form.
-All of this information, together with meta information about the user (e.g. the browser, location, earlier preferences etc...) then gets translated into a query to the underlying search system.
+The challenge for search engineers is to tweak this translation process from
+user entries to a concrete query in such a way that the search results contain
+the most relevant information with respect to the user's information need. This
+can only be done if the search result quality is evaluated constantly across a
+representative test suite of typical user queries, so that improvements in the
+rankings for one particular query don't negatively affect the ranking for
+other types of queries.

-The challenge for search engineers is to tweak this translation process from user entries to a concrete query in such a way, that the search results contain the most relevant information with respect to the users information need.
-This can only be done if the search result quality is evaluated constantly across a representative test suite of typical user queries, so that improvements in the rankings for one particular query doesn't negatively effect the ranking for other types of queries.
+In order to get started with search quality evaluation, three basic things are
+needed:

-In order to get started with search quality evaluation, three basic things are needed:
+. A collection of documents you want to evaluate your query performance against,
+  usually one or more indices.
+. A collection of typical search requests that users enter into your system.
+. A set of document ratings that judge the documents' relevance with respect to
+  a search request.
+
+It is important to note that one set of document ratings is needed per test
+query, and that the relevance judgements are based on the information need of
+the user that entered the query.

-. a collection of documents you want to evaluate your query performance against, usually one or more indices
-. a collection of typical search requests that users enter into your system
-. a set of document ratings that judge the documents relevance with respect to a search request+
- It is important to note that one set of document ratings is needed per test query, and that
- the relevance judgements are based on the information need of the user that entered the query.
+The ranking evaluation API provides a convenient way to use this information in
+a ranking evaluation request to calculate different search evaluation metrics.
+This gives a first estimation of your overall search quality and gives you a
+measurement to optimize against when fine-tuning various aspects of the query
+generation in your application.

-The ranking evaluation API provides a convenient way to use this information in a ranking evaluation request to calculate different search evaluation metrics. This gives a first estimation of your overall search quality and give you a measurement to optimize against when fine-tuning various aspect of the query generation in your application.

-[float]
-==== Ranking evaluation request structure
+[[search-rank-eval-api-path-params]]
+==== {api-path-parms-title}
+
+`<index>`::
+  (Required, string) Comma-separated list or wildcard expression of index names
+  used to limit the request.
+
+[[search-rank-eval-api-query-params]]
+==== {api-query-parms-title}
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=allow-no-indices]
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=expand-wildcards]
++
+--
+Defaults to `open`.
+--
+
+include::{docdir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailable]
+
+
+[[search-rank-eval-api-example]]
+==== {api-examples-title}

 In its most basic form, a request to the `_rank_eval` endpoint has two
 sections:

@@ -51,10 +103,13 @@ GET /my_index/_rank_eval
 <2> definition of the evaluation metric to calculate
 <3> a specific metric and its parameters

-The request section contains several search requests typical to your application, along with the document ratings for each particular search request, e.g.
+The request section contains several search requests typical to your
+application, along with the document ratings for each particular search request.
[source,js]
-----------------------------
+GET /my_index/_rank_eval
+{
    "requests": [
        {
            "id": "amsterdam_query", <1>
@@ -77,20 +132,30 @@ The request section contains several search requests typical to your application
        ]
    }
]
+    }
-----------------------------
// NOTCONSOLE

<1> the search requests id, used to group result details later
<2> the query that is being evaluated
-<3> a list of document ratings, each entry containing the documents `_index` and `_id` together with
-the rating of the documents relevance with regards to this search request
+<3> a list of document ratings, each entry containing the document's `_index`
+and `_id` together with the rating of the document's relevance with regard to
+this search request

-A document `rating` can be any integer value that expresses the relevance of the document on a user defined scale. For some of the metrics, just giving a binary rating (e.g. `0` for irrelevant and `1` for relevant) will be sufficient, other metrics can use a more fine grained scale.
+A document `rating` can be any integer value that expresses the relevance of the
+document on a user-defined scale. For some of the metrics, just giving a binary
+rating (for example `0` for irrelevant and `1` for relevant) will be sufficient;
+other metrics can use a more fine-grained scale.

-[float]
-==== Template based ranking evaluation

-As an alternative to having to provide a single query per test request, it is possible to specify query templates in the evaluation request and later refer to them. Queries with similar structure that only differ in their parameters don't have to be repeated all the time in the `requests` section this way. In typical search systems where user inputs usually get filled into a small set of query templates, this helps making the evaluation request more succinct.
+===== Template based ranking evaluation
+
+As an alternative to having to provide a single query per test request, it is
+possible to specify query templates in the evaluation request and later refer to
+them. Queries with similar structure that only differ in their parameters don't
+have to be repeated all the time in the `requests` section this way. In typical
+search systems where user inputs usually get filled into a small set of query
+templates, this helps make the evaluation request more succinct.

[source,js]
--------------------------------
@@ -129,23 +194,30 @@ GET /my_index/_rank_eval
 <3> a reference to a previously defined template
 <4> the parameters to use to fill the template

-[float]
-==== Available evaluation metrics

-The `metric` section determines which of the available evaluation metrics is going to be used.
-Currently, the following metrics are supported:
+===== Available evaluation metrics
+
+The `metric` section determines which of the available evaluation metrics is
+going to be used. The following metrics are supported:

[float]
[[k-precision]]
===== Precision at K (P@k)

-This metric measures the number of relevant results in the top k search results. Its a form of the well known https://en.wikipedia.org/wiki/Information_retrieval#Precision[Precision] metric that only looks at the top k documents. It is the fraction of relevant documents in those first k
-search. A precision at 10 (P@10) value of 0.6 then means six out of the 10 top hits are relevant with respect to the users information need.
+This metric measures the number of relevant results in the top k search results.
+It's a form of the well-known
+https://en.wikipedia.org/wiki/Information_retrieval#Precision[Precision] metric
+that only looks at the top k documents. It is the fraction of relevant documents
+in those first k search results. A precision at 10 (P@10) value of 0.6 then
+means six out of the 10 top hits are relevant with respect to the user's
+information need.

-P@k works well as a simple evaluation metric that has the benefit of being easy to understand and explain.
-Documents in the collection need to be rated either as relevant or irrelevant with respect to the current query.
-P@k does not take into account where in the top k results the relevant documents occur, so a ranking of ten results that
-contains one relevant result in position 10 is equally good as a ranking of ten results that contains one relevant result in position 1.
+P@k works well as a simple evaluation metric that has the benefit of being easy
+to understand and explain. Documents in the collection need to be rated either
+as relevant or irrelevant with respect to the current query. P@k does not take
+into account where in the top k results the relevant documents occur, so a
+ranking of ten results that contains one relevant result in position 10 is
+equally good as a ranking of ten results that contains one relevant result in
+position 1.

[source,console]
--------------------------------
@@ -181,13 +253,15 @@ in the query. Defaults to 10.
If set to 'true', unlabeled documents are ignored and neither count as relevant or irrelevant. Set to 'false' (the default), they are treated as irrelevant.
|=======================================================================
+
[float]
===== Mean reciprocal rank

-For every query in the test suite, this metric calculates the reciprocal of the rank of the
-first relevant document. For example finding the first relevant result
-in position 3 means the reciprocal rank is 1/3. The reciprocal rank for each query
-is averaged across all queries in the test suite to give the https://en.wikipedia.org/wiki/Mean_reciprocal_rank[mean reciprocal rank].
+For every query in the test suite, this metric calculates the reciprocal of the
+rank of the first relevant document. For example, finding the first relevant
+result in position 3 means the reciprocal rank is 1/3. The reciprocal rank for
+each query is averaged across all queries in the test suite to give the
+https://en.wikipedia.org/wiki/Mean_reciprocal_rank[mean reciprocal rank].

[source,console]
--------------------------------
@@ -220,12 +294,18 @@ in the query. Defaults to 10.
 "relevant". Defaults to `1`.
|=======================================================================
+
[float]
===== Discounted cumulative gain (DCG)

-In contrast to the two metrics above, https://en.wikipedia.org/wiki/Discounted_cumulative_gain[discounted cumulative gain] takes both, the rank and the rating of the search results, into account.
+In contrast to the two metrics above,
+https://en.wikipedia.org/wiki/Discounted_cumulative_gain[discounted cumulative gain]
+takes both the rank and the rating of the search results into account.

-The assumption is that highly relevant documents are more useful for the user when appearing at the top of the result list. Therefore, the DCG formula reduces the contribution that high ratings for documents on lower search ranks have on the overall DCG metric.
+The assumption is that highly relevant documents are more useful for the user
+when appearing at the top of the result list.
Therefore, the DCG formula reduces
+the contribution that high ratings for documents on lower search ranks have on
+the overall DCG metric.

[source,console]
--------------------------------
@@ -257,23 +337,31 @@ in the query. Defaults to 10.
|`normalize` | If set to `true`, this metric will calculate the https://en.wikipedia.org/wiki/Discounted_cumulative_gain#Normalized_DCG[Normalized DCG].
|=======================================================================
+
[float]
===== Expected Reciprocal Rank (ERR)

-Expected Reciprocal Rank (ERR) is an extension of the classical reciprocal rank for the graded relevance case
-(Olivier Chapelle, Donald Metzler, Ya Zhang, and Pierre Grinspan. 2009. http://olivier.chapelle.cc/pub/err.pdf[Expected reciprocal rank for graded relevance].)
+Expected Reciprocal Rank (ERR) is an extension of the classical reciprocal rank
+for the graded relevance case (Olivier Chapelle, Donald Metzler, Ya Zhang, and
+Pierre Grinspan. 2009.
+http://olivier.chapelle.cc/pub/err.pdf[Expected reciprocal rank for graded relevance].)

-It is based on the assumption of a cascade model of search, in which a user scans through ranked search
-results in order and stops at the first document that satisfies the information need. For this reason, it
-is a good metric for question answering and navigation queries, but less so for survey oriented information
-needs where the user is interested in finding many relevant documents in the top k results.
+It is based on the assumption of a cascade model of search, in which a user
+scans through ranked search results in order and stops at the first document
+that satisfies the information need. For this reason, it is a good metric for
+question answering and navigation queries, but less so for survey-oriented
+information needs where the user is interested in finding many relevant
+documents in the top k results.

-The metric models the expectation of the reciprocal of the position at which a user stops reading through
-the result list. This means that relevant document in top ranking positions will contribute much to the
-overall score. However, the same document will contribute much less to the score if it appears in a lower rank,
-even more so if there are some relevant (but maybe less relevant) documents preceding it.
-In this way, the ERR metric discounts documents which are shown after very relevant documents. This introduces
-a notion of dependency in the ordering of relevant documents that e.g. Precision or DCG don't account for.
+The metric models the expectation of the reciprocal of the position at which a
+user stops reading through the result list. This means that relevant documents
+in top ranking positions will contribute much to the overall score. However, the
+same document will contribute much less to the score if it appears in a lower
+rank, even more so if there are some relevant (but maybe less relevant)
+documents preceding it. In this way, the ERR metric discounts documents which
+are shown after very relevant documents. This introduces a notion of dependency
+in the ordering of relevant documents that e.g. Precision or DCG don't account
+for.

[source,console]
--------------------------------
@@ -306,12 +394,13 @@ relevance judgments.
in the query. Defaults to 10.
|======================================================================= -[float] -==== Response format -The response of the `_rank_eval` endpoint contains the overall calculated result for the defined quality metric, -a `details` section with a breakdown of results for each query in the test suite and an optional `failures` section -that shows potential errors of individual queries. The response has the following format: +===== Response format + +The response of the `_rank_eval` endpoint contains the overall calculated result +for the defined quality metric, a `details` section with a breakdown of results +for each query in the test suite and an optional `failures` section that shows +potential errors of individual queries. The response has the following format: [source,js] -------------------------------- diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index b5e1a4571bd1..81f850a75716 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -122,7 +122,6 @@ The API returns the following response: "hits" : [ { "_index" : "twitter", - "_type" : "_doc", "_id" : "0", "_score": 1.3862944, "_source" : { diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc index 73d9a1d93d78..e8171d43b17f 100644 --- a/docs/reference/search/request/highlighting.asciidoc +++ b/docs/reference/search/request/highlighting.asciidoc @@ -844,7 +844,6 @@ Response: "hits": [ { "_index": "twitter", - "_type": "_doc", "_id": "1", "_score": 1.601195, "_source": { @@ -902,7 +901,6 @@ Response: "hits": [ { "_index": "twitter", - "_type": "_doc", "_id": "1", "_score": 1.601195, "_source": { diff --git a/docs/reference/search/request/inner-hits.asciidoc b/docs/reference/search/request/inner-hits.asciidoc index 51c2c1589033..b356c2cfc2d7 100644 --- a/docs/reference/search/request/inner-hits.asciidoc +++ b/docs/reference/search/request/inner-hits.asciidoc @@ -39,7 +39,6 @@ If `inner_hits` is defined on a query that supports it then each search hit will "total": ..., "hits": [ { - "_type": ..., "_id": ..., ... }, @@ -142,7 +141,6 @@ An example of a response snippet that could be generated from the above search r "hits": [ { "_index": "test", - "_type": "_doc", "_id": "1", "_score": 1.0, "_source": ..., @@ -157,7 +155,6 @@ An example of a response snippet that could be generated from the above search r "hits": [ { "_index": "test", - "_type": "_doc", "_id": "1", "_nested": { "field": "comments", @@ -270,7 +267,6 @@ Response not included in text but tested for completeness sake. "hits": [ { "_index": "test", - "_type": "_doc", "_id": "1", "_score": 1.0444684, "_source": ..., @@ -285,7 +281,6 @@ Response not included in text but tested for completeness sake. 
"hits": [ { "_index": "test", - "_type": "_doc", "_id": "1", "_nested": { "field": "comments", @@ -388,7 +383,6 @@ Which would look like: "hits": [ { "_index": "test", - "_type": "_doc", "_id": "1", "_score": 0.6931472, "_source": ..., @@ -403,7 +397,6 @@ Which would look like: "hits": [ { "_index": "test", - "_type": "_doc", "_id": "1", "_nested": { "field": "comments", @@ -502,7 +495,6 @@ An example of a response snippet that could be generated from the above search r "hits": [ { "_index": "test", - "_type": "_doc", "_id": "1", "_score": 1.0, "_source": { @@ -520,7 +512,6 @@ An example of a response snippet that could be generated from the above search r "hits": [ { "_index": "test", - "_type": "_doc", "_id": "2", "_score": 1.0, "_routing": "1", diff --git a/docs/reference/search/suggesters.asciidoc b/docs/reference/search/suggesters.asciidoc index e5f715823c6c..bf1c35988d8a 100644 --- a/docs/reference/search/suggesters.asciidoc +++ b/docs/reference/search/suggesters.asciidoc @@ -1,16 +1,8 @@ [[search-suggesters]] === Suggesters -The suggest feature suggests similar looking terms based on a provided -text by using a suggester. Parts of the suggest feature are still under -development. - -The suggest request part is defined alongside the query part in a `_search` -request. If the query part is left out, only suggestions are returned. - -NOTE: `_suggest` endpoint has been deprecated in favour of using suggest via -`_search` endpoint. In 5.0, the `_search` endpoint has been optimized for -suggest only search requests. +Suggests similar looking terms based on a provided text by using a suggester. +Parts of the suggest feature are still under development. [source,console] -------------------------------------------------- @@ -33,10 +25,27 @@ POST twitter/_search -------------------------------------------------- // TEST[setup:twitter] -Several suggestions can be specified per request. Each suggestion is -identified with an arbitrary name. In the example below two suggestions -are requested. Both `my-suggest-1` and `my-suggest-2` suggestions use -the `term` suggester, but have a different `text`. + +[[search-suggesters-api-request]] +==== {api-request-title} + +The suggest feature suggests similar looking terms based on a provided text by +using a suggester. The suggest request part is defined alongside the query part +in a `_search` request. If the query part is left out, only suggestions are +returned. + +NOTE: `_suggest` endpoint has been deprecated in favour of using suggest via +`_search` endpoint. In 5.0, the `_search` endpoint has been optimized for +suggest only search requests. + + +[[search-suggesters-api-example]] +==== {api-examples-title} + +Several suggestions can be specified per request. Each suggestion is identified +with an arbitrary name. In the example below two suggestions are requested. Both +`my-suggest-1` and `my-suggest-2` suggestions use the `term` suggester, but have +a different `text`. [source,console] -------------------------------------------------- @@ -60,6 +69,7 @@ POST _search -------------------------------------------------- // TEST[setup:twitter] + The below suggest response example includes the suggestion response for `my-suggest-1` and `my-suggest-2`. Each suggestion part contains entries. Each entry is effectively a token from the suggest text and @@ -107,7 +117,7 @@ term suggester's score is based on the edit distance. 
[float]
[[global-suggest]]
-==== Global suggest text
+===== Global suggest text

 To avoid repetition of the suggest text, it is possible to define a
 global text. In the example below the suggest text is defined globally
diff --git a/docs/reference/search/suggesters/completion-suggest.asciidoc b/docs/reference/search/suggesters/completion-suggest.asciidoc
index 048f27b4e349..2aeda557c1da 100644
--- a/docs/reference/search/suggesters/completion-suggest.asciidoc
+++ b/docs/reference/search/suggesters/completion-suggest.asciidoc
@@ -186,7 +186,6 @@ returns this response:
 "options" : [ {
 "text" : "Nirvana",
 "_index": "music",
-"_type": "_doc",
 "_id": "1",
 "_score": 1.0,
 "_source": {
@@ -264,7 +263,6 @@ Which should look like:
 "options" : [ {
 "text" : "Nirvana",
 "_index": "music",
-"_type": "_doc",
 "_id": "1",
 "_score": 1.0,
 "_source": {
diff --git a/docs/reference/search/uri-request.asciidoc b/docs/reference/search/uri-request.asciidoc
index 1c523fae2a91..ff234f415a3d 100644
--- a/docs/reference/search/uri-request.asciidoc
+++ b/docs/reference/search/uri-request.asciidoc
@@ -138,7 +138,6 @@ The API returns the following response:
 "hits" : [
 {
 "_index" : "twitter",
-"_type" : "_doc",
 "_id" : "0",
 "_score": 1.3862944,
 "_source" : {
diff --git a/docs/reference/settings/ilm-settings.asciidoc b/docs/reference/settings/ilm-settings.asciidoc
index 80c20b59b2bf..97ae65d18152 100644
--- a/docs/reference/settings/ilm-settings.asciidoc
+++ b/docs/reference/settings/ilm-settings.asciidoc
@@ -27,6 +27,14 @@ information about rollover, see <>.
 (<>) How often {ilm} checks for indices that meet policy criteria. Defaults to `10m`.

+`index.lifecycle.parse_origination_date`::
+When configured to `true`, the origination date will be parsed from the index
+name. The index format must match the pattern `^.*-{date_format}-\\d+`, where
+the `date_format` is `yyyy.MM.dd` and the trailing digits are optional (an
+index that was rolled over would normally match the full format, e.g.
+`logs-2016.10.31-000002`). If the index name doesn't match the pattern,
+the index creation will fail.
+
 `index.lifecycle.origination_date`::
 The timestamp that will be used to calculate the index age for its phase
 transitions. This allows the users to create an index containing old data and
diff --git a/docs/reference/sql/functions/geo.asciidoc b/docs/reference/sql/functions/geo.asciidoc
index bb8680ac183c..c90d833919ea 100644
--- a/docs/reference/sql/functions/geo.asciidoc
+++ b/docs/reference/sql/functions/geo.asciidoc
@@ -5,14 +5,15 @@

 beta[]

-The geo functions work with geometries stored in `geo_point` and `geo_shape` fields, or returned by other geo functions.
+The geo functions work with geometries stored in `geo_point`, `geo_shape` and `shape` fields, or returned by other geo functions.

 ==== Limitations

-Both <> and <> types are represented in SQL as geometry and can be used
-interchangeably with the following exceptions:
+<>, <> and <> types are represented in SQL as
+geometry and can be used interchangeably with the following exceptions:

-* `geo_shape` fields don't have doc values, therefore these fields cannot be used for filtering, grouping or sorting.
+* `geo_shape` and `shape` fields don't have doc values, therefore these fields cannot be used for filtering, grouping
+  or sorting.
* `geo_points` fields are indexed and have doc values by default, however only latitude and longitude are stored and indexed with some loss of precision from the original values (4.190951585769653E-8 for the latitude and diff --git a/docs/reference/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc index ee73a1eea7cb..811bb1ac6a47 100644 --- a/docs/reference/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -83,6 +83,7 @@ s|SQL precision | interval_minute_to_second | 23 | geo_point | 52 | geo_shape | 2,147,483,647 +| shape | 2,147,483,647 |=== diff --git a/docs/reference/transform/api-quickref.asciidoc b/docs/reference/transform/api-quickref.asciidoc index 9d2590a1540e..d2dff5c3021d 100644 --- a/docs/reference/transform/api-quickref.asciidoc +++ b/docs/reference/transform/api-quickref.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] -[[df-api-quickref]] -== API quick reference +[[transform-api-quickref]] +=== API quick reference All {transform} endpoints have the following base: diff --git a/docs/reference/transform/apis/index.asciidoc b/docs/reference/transform/apis/index.asciidoc index e496401d340d..80384af95980 100644 --- a/docs/reference/transform/apis/index.asciidoc +++ b/docs/reference/transform/apis/index.asciidoc @@ -3,8 +3,6 @@ [[transform-apis]] == {transform-cap} APIs -See also {stack-ov}/ml-dataframes.html[{transforms-cap}]. - * <> * <> * <> diff --git a/docs/reference/transform/apis/put-transform.asciidoc b/docs/reference/transform/apis/put-transform.asciidoc index 49c3d7981ddb..d72f2722f4e0 100644 --- a/docs/reference/transform/apis/put-transform.asciidoc +++ b/docs/reference/transform/apis/put-transform.asciidoc @@ -37,8 +37,7 @@ entities are defined by the set of `group_by` fields in the `pivot` object. You can also think of the destination index as a two-dimensional tabular data structure (known as a {dataframe}). The ID for each document in the {dataframe} is generated from a hash of the entity, so there is a unique row -per entity. For more information, see -{stack-ov}/ml-dataframes.html[{transforms-cap}]. +per entity. For more information, see <>. When the {transform} is created, a series of validations occur to ensure its success. For example, there is a check for the existence of the diff --git a/docs/reference/transform/apis/transformresource.asciidoc b/docs/reference/transform/apis/transformresource.asciidoc index 55b2095a6ec8..190f827cd8e1 100644 --- a/docs/reference/transform/apis/transformresource.asciidoc +++ b/docs/reference/transform/apis/transformresource.asciidoc @@ -5,8 +5,7 @@ {transform-cap} resources relate to the <>. -For more information, see -{stack-ov}/ecommerce-dataframes.html[Transforming your data with {dataframes}]. +For more information, see <>. [discrete] [[transform-properties]] @@ -101,8 +100,7 @@ pivot function `group by` fields and the aggregation to reduce the data. * {ref}/search-aggregations-pipeline-bucket-selector-aggregation.html[Bucket Selector] IMPORTANT: {transforms-cap} support a subset of the functionality in -composite aggregations. See -{stack-ov}/dataframe-limitations.html[{dataframe-cap} limitations]. +composite aggregations. See <>. 
-- diff --git a/docs/reference/transform/checkpoints.asciidoc b/docs/reference/transform/checkpoints.asciidoc index 4c41b876b23c..379834e8553d 100644 --- a/docs/reference/transform/checkpoints.asciidoc +++ b/docs/reference/transform/checkpoints.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] -[[ml-transform-checkpoints]] -== How {transform} checkpoints work +[[transform-checkpoints]] +=== How {transform} checkpoints work ++++ How checkpoints work ++++ diff --git a/docs/reference/transform/ecommerce-example.asciidoc b/docs/reference/transform/ecommerce-tutorial.asciidoc similarity index 96% rename from docs/reference/transform/ecommerce-example.asciidoc rename to docs/reference/transform/ecommerce-tutorial.asciidoc index 7f8267baa165..026127f97bac 100644 --- a/docs/reference/transform/ecommerce-example.asciidoc +++ b/docs/reference/transform/ecommerce-tutorial.asciidoc @@ -1,11 +1,11 @@ [role="xpack"] [testenv="basic"] -[[ecommerce-dataframes]] -=== Transforming the eCommerce sample data +[[ecommerce-transforms]] +=== Tutorial: Transforming the eCommerce sample data beta[] -<> enable you to retrieve information +<> enable you to retrieve information from an {es} index, transform it, and store it in another index. Let's use the {kibana-ref}/add-sample-data.html[{kib} sample data] to demonstrate how you can pivot and summarize your data with {transforms}. @@ -23,7 +23,9 @@ You also need `read` and `view_index_metadata` index privileges on the source index and `read`, `create_index`, and `index` privileges on the destination index. -For more information, see <> and <>. +For more information, see +{stack-ov}/security-privileges.html[Security privileges] and +{stack-ov}/built-in-roles.html[Built-in roles]. -- . Choose your _source index_. diff --git a/docs/reference/transform/dataframe-examples.asciidoc b/docs/reference/transform/examples.asciidoc similarity index 97% rename from docs/reference/transform/dataframe-examples.asciidoc rename to docs/reference/transform/examples.asciidoc index 6c03ad3ecb3e..6b15b0517e76 100644 --- a/docs/reference/transform/dataframe-examples.asciidoc +++ b/docs/reference/transform/examples.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] -[[dataframe-examples]] -== {transform-cap} examples +[[transform-examples]] +=== {transform-cap} examples ++++ Examples ++++ @@ -12,17 +12,14 @@ These examples demonstrate how to use {transforms} to derive useful insights from your data. All the examples use one of the {kibana-ref}/add-sample-data.html[{kib} sample datasets]. For a more detailed, step-by-step example, see -<>. +<>. -* <> * <> * <> * <> -include::ecommerce-example.asciidoc[] - [[example-best-customers]] -=== Finding your best customers +==== Finding your best customers In this example, we use the eCommerce orders sample dataset to find the customers who spent the most in our hypothetical webshop. Let's transform the data such @@ -106,7 +103,7 @@ navigate data from a customer centric perspective. In some cases, it can even make creating visualizations much simpler. [[example-airline]] -=== Finding air carriers with the most delays +==== Finding air carriers with the most delays In this example, we use the Flights sample dataset to find out which air carrier had the most delays. First, we filter the source data such that it excludes all @@ -193,7 +190,7 @@ or flight stats for any of the featured destination or origin airports. 
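For reference, a preview request for this kind of carrier pivot might look like
the following minimal sketch. It assumes the `kibana_sample_data_flights`
sample index with its `Carrier`, `Cancelled`, and `FlightDelayMin` fields, and
it omits the additional aggregations discussed above:

[source,console]
--------------------------------------------------
POST _data_frame/transforms/_preview
{
  "source": {
    "index": "kibana_sample_data_flights",
    "query": {
      "term": { "Cancelled": false } <1>
    }
  },
  "pivot": {
    "group_by": {
      "carrier": { "terms": { "field": "Carrier" } }
    },
    "aggregations": {
      "delay_mins_total": { "sum": { "field": "FlightDelayMin" } }
    }
  }
}
--------------------------------------------------
// TEST[skip:illustrative sketch only]
<1> Cancelled flights are excluded before the data is pivoted.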
[[example-clientips]] -=== Finding suspicious client IPs by using scripted metrics +==== Finding suspicious client IPs by using scripted metrics With {transforms}, you can use {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[scripted diff --git a/docs/reference/transform/index.asciidoc b/docs/reference/transform/index.asciidoc index 41ffd97ee399..595cbdef56d9 100644 --- a/docs/reference/transform/index.asciidoc +++ b/docs/reference/transform/index.asciidoc @@ -1,27 +1,28 @@ [role="xpack"] -[[ml-dataframes]] -= Transforming data - -[partintro] --- +[[transforms]] +== Transforming data +// tag::transform-intro[] {transforms-cap} enable you to convert existing {es} indices into summarized -indices, which provide opportunities for new insights and analytics. For example, -you can use {transforms} to pivot your data into entity-centric indices that -summarize the behavior of users or sessions or other entities in your data. +indices, which provide opportunities for new insights and analytics. +// end::transform-intro[] +For example, you can use {transforms} to pivot your data into entity-centric +indices that summarize the behavior of users or sessions or other entities in +your data. -* <> -* <> -* <> -* <> -* <> -* <> --- +* <> +* <> +* <> +* <> +* <> +* <> +* <> include::overview.asciidoc[] include::usage.asciidoc[] include::checkpoints.asciidoc[] include::api-quickref.asciidoc[] -include::dataframe-examples.asciidoc[] +include::ecommerce-tutorial.asciidoc[] +include::examples.asciidoc[] include::troubleshooting.asciidoc[] include::limitations.asciidoc[] \ No newline at end of file diff --git a/docs/reference/transform/limitations.asciidoc b/docs/reference/transform/limitations.asciidoc index a97737464b3f..70a3ffb25432 100644 --- a/docs/reference/transform/limitations.asciidoc +++ b/docs/reference/transform/limitations.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] -[[dataframe-limitations]] -== {transform-cap} limitations +[[transform-limitations]] +=== {transform-cap} limitations [subs="attributes"] ++++ Limitations @@ -12,8 +12,8 @@ The following limitations and known problems apply to the 7.4 release of the Elastic {dataframe} feature: [float] -[[df-compatibility-limitations]] -=== Beta {transforms} do not have guaranteed backwards or forwards compatibility +[[transform-compatibility-limitations]] +==== Beta {transforms} do not have guaranteed backwards or forwards compatibility Whilst {transforms} are beta, it is not guaranteed that a {transform} created in a previous version of the {stack} will be able @@ -25,8 +25,8 @@ destination index. This is a normal {es} index and is not affected by the beta status. [float] -[[df-ui-limitation]] -=== {dataframe-cap} UI will not work during a rolling upgrade from 7.2 +[[transform-ui-limitation]] +==== {dataframe-cap} UI will not work during a rolling upgrade from 7.2 If your cluster contains mixed version nodes, for example during a rolling upgrade from 7.2 to a newer version, and {transforms} have been @@ -35,22 +35,22 @@ have been upgraded to the newer version before using the {dataframe} UI. [float] -[[df-datatype-limitations]] -=== {dataframe-cap} data type limitation +[[transform-datatype-limitations]] +==== {dataframe-cap} data type limitation {dataframes-cap} do not (yet) support fields containing arrays – in the UI or the API. If you try to create one, the UI will fail to show the source index table. 
[float] -[[df-ccs-limitations]] -=== {ccs-cap} is not supported +[[transform-ccs-limitations]] +==== {ccs-cap} is not supported {ccs-cap} is not supported for {transforms}. [float] -[[df-kibana-limitations]] -=== Up to 1,000 {transforms} are supported +[[transform-kibana-limitations]] +==== Up to 1,000 {transforms} are supported A single cluster will support up to 1,000 {transforms}. When using the @@ -59,8 +59,8 @@ When using the enumerate through the full list. [float] -[[df-aggresponse-limitations]] -=== Aggregation responses may be incompatible with destination index mappings +[[transform-aggresponse-limitations]] +==== Aggregation responses may be incompatible with destination index mappings When a {transform} is first started, it will deduce the mappings required for the destination index. This process is based on the field types of @@ -77,8 +77,8 @@ workaround, you may define custom mappings prior to starting the {ref}/indices-templates.html[define an index template]. [float] -[[df-batch-limitations]] -=== Batch {transforms} may not account for changed documents +[[transform-batch-limitations]] +==== Batch {transforms} may not account for changed documents A batch {transform} uses a {ref}/search-aggregations-bucket-composite-aggregation.html[composite aggregation] @@ -88,8 +88,8 @@ do not yet support a search context, therefore if the source data is changed results may not include these changes. [float] -[[df-consistency-limitations]] -=== {cdataframe-cap} consistency does not account for deleted or updated documents +[[transform-consistency-limitations]] +==== {cdataframe-cap} consistency does not account for deleted or updated documents While the process for {transforms} allows the continual recalculation of the {transform} as new data is being ingested, it does also have @@ -114,16 +114,16 @@ updated when viewing the {dataframe} destination index. [float] -[[df-deletion-limitations]] -=== Deleting a {transform} does not delete the {dataframe} destination index or {kib} index pattern +[[transform-deletion-limitations]] +==== Deleting a {transform} does not delete the {dataframe} destination index or {kib} index pattern When deleting a {transform} using `DELETE _data_frame/transforms/index` neither the {dataframe} destination index nor the {kib} index pattern, should one have been created, are deleted. These objects must be deleted separately. [float] -[[df-aggregation-page-limitations]] -=== Handling dynamic adjustment of aggregation page size +[[transform-aggregation-page-limitations]] +==== Handling dynamic adjustment of aggregation page size During the development of {transforms}, control was favoured over performance. In the design considerations, it is preferred for the @@ -153,8 +153,8 @@ requested has been reduced to its minimum, then the {transform} will be set to a failed state. [float] -[[df-dynamic-adjustments-limitations]] -=== Handling dynamic adjustments for many terms +[[transform-dynamic-adjustments-limitations]] +==== Handling dynamic adjustments for many terms For each checkpoint, entities are identified that have changed since the last time the check was performed. This list of changed entities is supplied as a @@ -176,8 +176,8 @@ Using smaller values for `max_page_search_size` may result in a longer duration for the {transform} checkpoint to complete. 
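For reference, `max_page_search_size` sits inside the `pivot` section of the
{transform} configuration. A minimal sketch (the transform id, index names, and
field names are placeholders):

[source,console]
--------------------------------------------------
PUT _data_frame/transforms/example_transform
{
  "source": { "index": "source_index" },
  "dest": { "index": "dest_index" },
  "pivot": {
    "group_by": {
      "user": { "terms": { "field": "user_id" } }
    },
    "aggregations": {
      "bytes_total": { "sum": { "field": "bytes" } }
    },
    "max_page_search_size": 500 <1>
  }
}
--------------------------------------------------
// TEST[skip:illustrative sketch only]
<1> Lower values reduce memory pressure per search but can lengthen checkpoints.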
[float] -[[df-scheduling-limitations]] -=== {cdataframe-cap} scheduling limitations +[[transform-scheduling-limitations]] +==== {cdataframe-cap} scheduling limitations A {cdataframe} periodically checks for changes to source data. The functionality of the scheduler is currently limited to a basic periodic timer which can be @@ -188,8 +188,8 @@ search/index operations has other users in your cluster. Also note that retries occur at `frequency` interval. [float] -[[df-failed-limitations]] -=== Handling of failed {transforms} +[[transform-failed-limitations]] +==== Handling of failed {transforms} Failed {transforms} remain as a persistent task and should be handled appropriately, either by deleting it or by resolving the root cause of the @@ -199,8 +199,8 @@ When using the API to delete a failed {transform}, first stop it using `_stop?force=true`, then delete it. [float] -[[df-availability-limitations]] -=== {cdataframes-cap} may give incorrect results if documents are not yet available to search +[[transform-availability-limitations]] +==== {cdataframes-cap} may give incorrect results if documents are not yet available to search After a document is indexed, there is a very small delay until it is available to search. diff --git a/docs/reference/transform/overview.asciidoc b/docs/reference/transform/overview.asciidoc index fa161f2e9ea3..e3c852d8be94 100644 --- a/docs/reference/transform/overview.asciidoc +++ b/docs/reference/transform/overview.asciidoc @@ -1,6 +1,6 @@ [role="xpack"] -[[ml-transform-overview]] -== {transform-cap} overview +[[transform-overview]] +=== {transform-cap} overview ++++ Overview ++++ diff --git a/docs/reference/transform/troubleshooting.asciidoc b/docs/reference/transform/troubleshooting.asciidoc index 9d76e93415db..b453529a65fe 100644 --- a/docs/reference/transform/troubleshooting.asciidoc +++ b/docs/reference/transform/troubleshooting.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] -[[dataframe-troubleshooting]] -== Troubleshooting {transforms} +[[transform-troubleshooting]] +=== Troubleshooting {transforms} [subs="attributes"] ++++ Troubleshooting @@ -9,7 +9,11 @@ Use the information in this section to troubleshoot common problems. -include::{stack-repo-dir}/help.asciidoc[tag=get-help] +For issues that you cannot fix yourself … we’re here to help. +If you are an existing Elastic customer with a support contract, please create +a ticket in the +https://support.elastic.co/customers/s/login/[Elastic Support portal]. +Or post in the https://discuss.elastic.co/[Elastic forum]. If you encounter problems with your {transforms}, you can gather more information from the following files and APIs: diff --git a/docs/reference/transform/usage.asciidoc b/docs/reference/transform/usage.asciidoc index 70dfe0f80b39..f78a0388bcb8 100644 --- a/docs/reference/transform/usage.asciidoc +++ b/docs/reference/transform/usage.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] -[[ml-transforms-usage]] -== When to use {transforms} +[[transform-usage]] +=== When to use {transforms} {es} aggregations are a powerful and flexible feature that enable you to summarize and retrieve complex insights about your data. 
You can summarize diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index ffdabd6d6339..14fef4334464 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -101,7 +101,7 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @After public void cleanup() throws Exception { - if (isMachineLearningTest() || isDataFrameTest()) { + if (isMachineLearningTest() || isTransformTest()) { ESRestTestCase.waitForPendingTasks(adminClient()); } } @@ -111,9 +111,9 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { return testName != null && (testName.contains("/ml/") || testName.contains("\\ml\\")); } - protected boolean isDataFrameTest() { + protected boolean isTransformTest() { String testName = getTestName(); - return testName != null && (testName.contains("/data-frames/") || testName.contains("\\data-frames\\")); + return testName != null && (testName.contains("/transform/") || testName.contains("\\transform\\")); } /** diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java index afde17d9ad01..5141ccd95ab4 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java @@ -19,11 +19,9 @@ package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.cjk.CJKBigramFilter; import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -51,9 +49,6 @@ import java.util.Set; */ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory { - private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(LogManager.getLogger(CJKBigramFilterFactory.class)); - private final int flags; private final boolean outputUnigrams; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactory.java index a3283769972a..99162888e6fc 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactory.java @@ -19,12 +19,10 @@ package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.commongrams.CommonGramsQueryFilter; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -34,9 +32,6 @@ import 
org.elasticsearch.index.analysis.TokenFilterFactory; public class CommonGramsTokenFilterFactory extends AbstractTokenFilterFactory { - private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(LogManager.getLogger(CommonGramsTokenFilterFactory.class)); - private final CharArraySet words; private final boolean ignoreCase; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java index bdbf6498bce5..2e20ca7ad6df 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java @@ -19,11 +19,9 @@ package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -33,9 +31,6 @@ import org.elasticsearch.index.analysis.TokenFilterFactory; public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory { - private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(LogManager.getLogger(EdgeNGramTokenFilterFactory.class)); - private final int minGram; private final int maxGram; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintTokenFilterFactory.java index 914c157324e2..cbf31c466849 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FingerprintTokenFilterFactory.java @@ -19,10 +19,8 @@ package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.FingerprintFilter; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -34,9 +32,6 @@ import static org.elasticsearch.analysis.common.FingerprintAnalyzerProvider.MAX_ public class FingerprintTokenFilterFactory extends AbstractTokenFilterFactory { - private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(LogManager.getLogger(FingerprintTokenFilterFactory.class)); - private final char separator; private final int maxOutputSize; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java index a20ed8c1e43f..122214aa8911 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java @@ -19,14 +19,12 @@ package org.elasticsearch.analysis.common; -import 
org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter; import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -42,9 +40,6 @@ import java.util.function.Function; public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory { - private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(LogManager.getLogger(MultiplexerTokenFilterFactory.class)); - private List filterNames; private final boolean preserveOriginal; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java index ab8d9e4d76e8..5c8d2f6003a5 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java @@ -19,10 +19,8 @@ package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ngram.NGramTokenFilter; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -32,9 +30,6 @@ import org.elasticsearch.index.analysis.TokenFilterFactory; public class NGramTokenFilterFactory extends AbstractTokenFilterFactory { - private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(LogManager.getLogger(NGramTokenFilterFactory.class)); - private final int minGram; private final int maxGram; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java index 6285d25e01f1..bd7cd7ed3c5d 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java @@ -19,12 +19,10 @@ package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -48,9 +46,6 @@ import static org.elasticsearch.analysis.common.WordDelimiterTokenFilterFactory. 
public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFactory { - private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(LogManager.getLogger(WordDelimiterGraphTokenFilterFactory.class)); - private final byte[] charTypeTable; private final int flags; private final CharArraySet protoWords; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java index f33687ee2243..05bf35454258 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WordDelimiterTokenFilterFactory.java @@ -19,12 +19,10 @@ package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -53,9 +51,6 @@ import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.STEM_ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory { - private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(LogManager.getLogger(WordDelimiterTokenFilterFactory.class)); - private final byte[] charTypeTable; private final int flags; private final CharArraySet protoWords; diff --git a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java index d83762a5e47a..d1c8464de978 100644 --- a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java +++ b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java @@ -25,7 +25,6 @@ import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.useragent.UserAgentParser.Details; -import org.elasticsearch.ingest.useragent.UserAgentParser.VersionedName; import java.util.Arrays; import java.util.EnumSet; @@ -149,37 +148,6 @@ public class UserAgentProcessor extends AbstractProcessor { return ingestDocument; } - /** To maintain compatibility with logstash-filter-useragent */ - private String buildFullOSName(VersionedName operatingSystem) { - if (operatingSystem == null || operatingSystem.name == null) { - return null; - } - - StringBuilder sb = new StringBuilder(operatingSystem.name); - - if (operatingSystem.major != null) { - sb.append(" "); - sb.append(operatingSystem.major); - - if (operatingSystem.minor != null) { - sb.append("."); - sb.append(operatingSystem.minor); - - if (operatingSystem.patch != null) { - sb.append("."); - sb.append(operatingSystem.patch); - - if (operatingSystem.build != null) { - sb.append("."); - sb.append(operatingSystem.build); - } - } - } - } - - return sb.toString(); - } - @Override public String getType() { return TYPE; diff --git 
a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index c02c31155018..1d93e202d1a1 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -165,7 +164,7 @@ public class SearchTemplateResponseTests extends AbstractXContentTestCase compile(Loader loader, Set extractedVariables, String name, String source, CompilerSettings settings) { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); - SSource root = Walker.buildPainlessTree(scriptClassInfo, name, source, settings, painlessLookup, null); + SClass root = Walker.buildPainlessTree(scriptClassInfo, name, source, settings, painlessLookup, null); root.extractVariables(extractedVariables); root.storeSettings(settings); root.analyze(painlessLookup); @@ -227,7 +227,8 @@ final class Compiler { } return clazz.getConstructors()[0]; - } catch (Exception exception) { // Catch everything to let the user know this is something caused internally. + } catch (Exception exception) { + // Catch everything to let the user know this is something caused internally. throw new IllegalStateException("An internal error occurred attempting to define the script [" + name + "].", exception); } } @@ -240,7 +241,7 @@ final class Compiler { */ byte[] compile(String name, String source, CompilerSettings settings, Printer debugStream) { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); - SSource root = Walker.buildPainlessTree(scriptClassInfo, name, source, settings, painlessLookup, + SClass root = Walker.buildPainlessTree(scriptClassInfo, name, source, settings, painlessLookup, debugStream); root.extractVariables(new HashSet<>()); root.storeSettings(settings); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java index d95dc4266889..9db9011f0590 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/LambdaBootstrap.java @@ -60,32 +60,35 @@ import static org.objectweb.asm.Opcodes.H_NEWINVOKESPECIAL; * {@link java.lang.invoke.LambdaMetafactory} since the Painless casting model * cannot be fully supported through this class. * - * For each lambda function/method reference used within a Painless script + *

<p>For each lambda function/method reference used within a Painless script
  * a class will be generated at link-time using the
  * {@link LambdaBootstrap#lambdaBootstrap} method that contains the following:
- * 1. member fields for any captured variables
- * 2. a constructor that will take in captured variables and assign them to
+ *
+ * <ol>
+ *   <li>1. member fields for any captured variables
+ *   <li>2. a constructor that will take in captured variables and assign them to
  * their respective member fields
- * 3. a static ctor delegation method, if the lambda function is a ctor.
- * 4. a method that will load the member fields representing captured variables
+ *   <li>3. a static ctor delegation method, if the lambda function is a ctor.
+ *   <li>4. a method that will load the member fields representing captured variables
  * and take in any other necessary values based on the arguments passed into the
  * lambda function/reference method; it will then make a delegated call to the
- * actual lambda function/reference method
+ * actual lambda function/reference method.
+ * </ol>
  *
- * Take for example the following Painless script:
+ * <p>Take for example the following Painless script:
  *
- * {@code
+ * <pre>{@code
  * List list1 = new ArrayList(); "
  * list1.add(2); "
  * List list2 = new ArrayList(); "
  * list1.forEach(x -> list2.add(x));"
  * return list[0]"
- * }
+ * }</pre>
  *
- * The script contains a lambda function with a captured variable.
+ * <p>The script contains a lambda function with a captured variable.
  * The following Lambda class would be generated:
  *
- * {@code
+ * <pre>{@code
  *     public static final class $$Lambda0 implements Consumer {
  *         private List arg$0;
  *
@@ -109,9 +112,9 @@ import static org.objectweb.asm.Opcodes.H_NEWINVOKESPECIAL;
  *         }
  *         ...
  *     }
- * }
+ * }</pre>
  *
- * Also the accept method actually uses an invokedynamic
+ * <p>Also the accept method actually uses an invokedynamic
  * instruction to call the lambda$0 method so that
  * {@link MethodHandle#asType} can be used to do the necessary
  * conversions between argument types without having to hard
@@ -120,7 +123,7 @@ import static org.objectweb.asm.Opcodes.H_NEWINVOKESPECIAL;
  * calls the constructor. This method is used by the
  * invokedynamic call to initialize the instance.
  *
- * When the {@link CallSite} is linked the linked method depends
+ * <p>
When the {@link CallSite} is linked the linked method depends * on whether or not there are captures. If there are no captures * the same instance of the generated lambda class will be * returned each time by the factory method as there are no diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java index ce3f1936f26e..59ef3e35af35 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java @@ -316,7 +316,8 @@ public final class PainlessScriptEngine implements ScriptEngine { try { return context.factoryClazz.cast(factory.getConstructor().newInstance()); - } catch (Exception exception) { // Catch everything to let the user know this is something caused internally. + } catch (Exception exception) { + // Catch everything to let the user know this is something caused internally. throw new IllegalStateException( "An internal error occurred attempting to define the factory class [" + className + "].", exception); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 9d550ce5f5dd..ec56bf2845a2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -152,7 +152,7 @@ import org.elasticsearch.painless.node.SFunction; import org.elasticsearch.painless.node.SIf; import org.elasticsearch.painless.node.SIfElse; import org.elasticsearch.painless.node.SReturn; -import org.elasticsearch.painless.node.SSource; +import org.elasticsearch.painless.node.SClass; import org.elasticsearch.painless.node.SThrow; import org.elasticsearch.painless.node.STry; import org.elasticsearch.painless.node.SWhile; @@ -166,14 +166,14 @@ import java.util.List; */ public final class Walker extends PainlessParserBaseVisitor { - public static SSource buildPainlessTree(ScriptClassInfo mainMethod, String sourceName, + public static SClass buildPainlessTree(ScriptClassInfo mainMethod, String sourceName, String sourceText, CompilerSettings settings, PainlessLookup painlessLookup, Printer debugStream) { return new Walker(mainMethod, sourceName, sourceText, settings, painlessLookup, debugStream).source; } private final ScriptClassInfo scriptClassInfo; - private final SSource source; + private final SClass source; private final CompilerSettings settings; private final Printer debugStream; private final String sourceName; @@ -188,7 +188,7 @@ public final class Walker extends PainlessParserBaseVisitor { this.sourceName = Location.computeSourceName(sourceName); this.sourceText = sourceText; this.painlessLookup = painlessLookup; - this.source = (SSource)visit(buildAntlrTree(sourceText)); + this.source = (SClass)visit(buildAntlrTree(sourceText)); } private SourceContext buildAntlrTree(String source) { @@ -245,7 +245,7 @@ public final class Walker extends PainlessParserBaseVisitor { statements.add((AStatement)visit(statement)); } - return new SSource(scriptClassInfo, sourceName, sourceText, debugStream, location(ctx), functions, statements); + return new SClass(scriptClassInfo, sourceName, sourceText, debugStream, location(ctx), functions, statements); } @Override @@ -272,7 +272,7 @@ public final class Walker extends 
PainlessParserBaseVisitor { statements.add((AStatement)visit(ctx.block().dstatement())); } - return new SFunction(location(ctx), rtnType, name, paramTypes, paramNames, statements, false); + return new SFunction(location(ctx), rtnType, name, paramTypes, paramNames, new SBlock(location(ctx), statements), false); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java index 42ffe0a15828..629449c1a58b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java @@ -29,7 +29,7 @@ import org.objectweb.asm.Label; public abstract class AStatement extends ANode { /** - * Set to true when the final statement in an {@link SSource} is reached. + * Set to true when the final statement in an {@link SClass} is reached. * Used to determine whether or not an auto-return is necessary. */ boolean lastSource = false; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java index 23c9c91f2043..1d016a4ce9ae 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java @@ -291,7 +291,8 @@ public final class EAssignment extends AExpression { // from dup the value onto the stack } - lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array + // store the lhs's value from the stack in its respective variable/field/array + lhs.store(writer, globals); } else if (operation != null) { // Handle the case where we are doing a compound assignment that // does not represent a String concatenation. @@ -309,9 +310,9 @@ public final class EAssignment extends AExpression { // to the promotion type between the lhs and rhs types rhs.write(writer, globals); // write the bytecode for the rhs - // XXX: fix these types, but first we need def compound assignment tests. - // its tricky here as there are possibly explicit casts, too. - // write the operation instruction for compound assignment + // XXX: fix these types, but first we need def compound assignment tests. + // its tricky here as there are possibly explicit casts, too. + // write the operation instruction for compound assignment if (promote == def.class) { writer.writeDynamicBinaryInstruction( location, promote, def.class, def.class, operation, DefBootstrap.OPERATOR_COMPOUND_ASSIGNMENT); @@ -322,23 +323,24 @@ public final class EAssignment extends AExpression { writer.writeCast(back); // if necessary cast the promotion type value back to the lhs's type if (lhs.read && !post) { - writer.writeDup(MethodWriter.getType(lhs.actual).getSize(), lhs.accessElementCount()); // dup the value if the lhs is also - // read from and is not a post - // increment + // dup the value if the lhs is also read from and is not a post increment + writer.writeDup(MethodWriter.getType(lhs.actual).getSize(), lhs.accessElementCount()); } - lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array + // store the lhs's value from the stack in its respective variable/field/array + lhs.store(writer, globals); } else { // Handle the case for a simple write. 
rhs.write(writer, globals); // write the bytecode for the rhs rhs if (lhs.read) { - writer.writeDup(MethodWriter.getType(lhs.actual).getSize(), lhs.accessElementCount()); // dup the value if the lhs - // is also read from + // dup the value if the lhs is also read from + writer.writeDup(MethodWriter.getType(lhs.actual).getSize(), lhs.accessElementCount()); } - lhs.store(writer, globals); // store the lhs's value from the stack in its respective variable/field/array + // store the lhs's value from the stack in its respective variable/field/array + lhs.store(writer, globals); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 139d7ce097fe..bd247caf8d56 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -175,8 +175,8 @@ public final class ELambda extends AExpression implements ILambda { // desugar lambda body into a synthetic method String name = locals.getNextSyntheticName(); - desugared = new SFunction( - location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, statements, true); + desugared = new SFunction(location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, + new SBlock(location, statements), true); desugared.storeSettings(settings); desugared.generateSignature(locals.getPainlessLookup()); desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), desugared.name, returnType, diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java index 68fa5b1aec0c..39a4b9eadd03 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java @@ -28,6 +28,7 @@ import org.elasticsearch.painless.MethodWriter; import org.objectweb.asm.Type; import java.util.Arrays; +import java.util.Collections; import java.util.Objects; import java.util.Set; @@ -61,9 +62,11 @@ public final class ENewArrayFunctionRef extends AExpression implements ILambda { @Override void analyze(Locals locals) { - SReturn code = new SReturn(location, new ENewArray(location, type, Arrays.asList(new EVariable(location, "size")), false)); + SReturn code = new SReturn(location, + new ENewArray(location, type, Arrays.asList(new EVariable(location, "size")), false)); function = new SFunction(location, type, locals.getNextSyntheticName(), - Arrays.asList("int"), Arrays.asList("size"), Arrays.asList(code), true); + Collections.singletonList("int"), Collections.singletonList("size"), + new SBlock(location, Collections.singletonList(code)), true); function.storeSettings(settings); function.generateSignature(locals.getPainlessLookup()); function.extractVariables(null); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java index b00d2305d4b8..8cfd32a28448 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java @@ -36,7 +36,7 @@ import static 
java.util.Collections.emptyList; */ public final class SBlock extends AStatement { - private final List statements; + final List statements; public SBlock(Location location, List statements) { super(location); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java similarity index 99% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java index fd8129c4294e..0d15aec23b17 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SClass.java @@ -78,7 +78,7 @@ import static org.elasticsearch.painless.WriterConstants.STRING_TYPE; /** * The root of all Painless trees. Contains a series of statements. */ -public final class SSource extends AStatement { +public final class SClass extends AStatement { private final ScriptClassInfo scriptClassInfo; private final String name; @@ -94,7 +94,7 @@ public final class SSource extends AStatement { private final List getMethods; private byte[] bytes; - public SSource(ScriptClassInfo scriptClassInfo, String name, String sourceText, Printer debugStream, + public SClass(ScriptClassInfo scriptClassInfo, String name, String sourceText, Printer debugStream, Location location, List functions, List statements) { super(location); this.scriptClassInfo = Objects.requireNonNull(scriptClassInfo); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 4f8c68c4088f..21f64f16352f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -50,7 +50,7 @@ public final class SFunction extends AStatement { public final String name; private final List paramTypeStrs; private final List paramNameStrs; - private final List statements; + private final SBlock block; public final boolean synthetic; private CompilerSettings settings; @@ -65,7 +65,7 @@ public final class SFunction extends AStatement { private Variable loop = null; public SFunction(Location location, String rtnType, String name, - List paramTypes, List paramNames, List statements, + List paramTypes, List paramNames, SBlock block, boolean synthetic) { super(location); @@ -73,27 +73,23 @@ public final class SFunction extends AStatement { this.name = Objects.requireNonNull(name); this.paramTypeStrs = Collections.unmodifiableList(paramTypes); this.paramNameStrs = Collections.unmodifiableList(paramNames); - this.statements = Collections.unmodifiableList(statements); + this.block = Objects.requireNonNull(block); this.synthetic = synthetic; } @Override void storeSettings(CompilerSettings settings) { - for (AStatement statement : statements) { - statement.storeSettings(settings); - } + block.storeSettings(settings); this.settings = settings; } @Override void extractVariables(Set variables) { - for (AStatement statement : statements) { - // we reset the list for function scope - // note this is not stored for this node - // but still required for lambdas - statement.extractVariables(new HashSet<>()); - } + // we reset the list for function scope + // note this is not stored for this node + // but still required for 
lambdas + block.extractVariables(new HashSet<>()); } void generateSignature(PainlessLookup painlessLookup) { @@ -131,28 +127,14 @@ public final class SFunction extends AStatement { @Override void analyze(Locals locals) { - if (statements == null || statements.isEmpty()) { + if (block.statements.isEmpty()) { throw createError(new IllegalArgumentException("Cannot generate an empty function [" + name + "].")); } locals = Locals.newLocalScope(locals); - - AStatement last = statements.get(statements.size() - 1); - - for (AStatement statement : statements) { - // Note that we do not need to check after the last statement because - // there is no statement that can be unreachable after the last. - if (allEscape) { - throw createError(new IllegalArgumentException("Unreachable statement.")); - } - - statement.lastSource = statement == last; - - statement.analyze(locals); - - methodEscape = statement.methodEscape; - allEscape = statement.allEscape; - } + block.lastSource = true; + block.analyze(locals); + methodEscape = block.methodEscape; if (!methodEscape && returnType != void.class) { throw createError(new IllegalArgumentException("Not all paths provide a return value for method [" + name + "].")); @@ -184,9 +166,7 @@ public final class SFunction extends AStatement { function.visitVarInsn(Opcodes.ISTORE, loop.getSlot()); } - for (AStatement statement : statements) { - statement.write(function, globals); - } + block.write(function, globals); if (!methodEscape) { if (returnType == void.class) { @@ -205,6 +185,6 @@ public final class SFunction extends AStatement { if (false == (paramTypeStrs.isEmpty() && paramNameStrs.isEmpty())) { description.add(joinWithName("Args", pairwiseToString(paramTypeStrs, paramNameStrs), emptyList())); } - return multilineToString(description, statements); + return multilineToString(description, block.statements); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java index 8ba8b79b74a9..266968b39cd1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java @@ -88,7 +88,7 @@ * {@link org.elasticsearch.painless.node.SIf} - Represents an if block. * {@link org.elasticsearch.painless.node.SIfElse} - Represents an if/else block. * {@link org.elasticsearch.painless.node.SReturn} - Represents a return statement. - * {@link org.elasticsearch.painless.node.SSource} - The root of all Painless trees. Contains a series of statements. + * {@link org.elasticsearch.painless.node.SClass} - The root of all Painless trees. Contains a series of statements. * {@link org.elasticsearch.painless.node.SSubEachArray} - Represents a for-each loop for arrays. * {@link org.elasticsearch.painless.node.SSubEachIterable} - Represents a for-each loop for iterables. * {@link org.elasticsearch.painless.node.SThrow} - Represents a throw statement. @@ -98,7 +98,7 @@ * Note that internal nodes are generated during the analysis phase by modifying the tree on-the-fly * for clarity of development and convenience during the writing phase. *

- * All Painless trees must start with an SSource node at the root. Each node has a constructor that requires
+ * All Painless trees must start with an SClass node at the root. Each node has a constructor that requires
  * all of its values and children be passed in at the time of instantiation. This means that Painless trees
  * are built bottom-up; however, this helps enforce tree structure correctness and fits naturally with a
  * standard recursive-descent parser.
@@ -109,7 +109,7 @@
  * <p>
  * Generally, expression nodes have member data that evaluate static and def types. The typical order for an expression node
  * during the analysis phase looks like the following:
- * {@code
+ * <pre>{@code
  * For known expected types:
  *
  * expression.child.expected = expectedType      // set the known expected type
@@ -132,7 +132,7 @@
  * expression.child = expression.child.cast(...) // add an implicit cast node if the child node's
  *                                               // actual type is not the expected type and set the
  *                                               // expression's child to the implicit cast node
- * }
+ * }</pre>
  * Expression nodes just call each child during the writing phase.
  * <p>

* Postfix nodes represent postfixes in a variable/method chain including braces, calls, or fields. diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayLikeObjectTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayLikeObjectTestCase.java index 2f7c37e2f693..13400fcc2530 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayLikeObjectTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayLikeObjectTestCase.java @@ -87,7 +87,8 @@ public abstract class ArrayLikeObjectTestCase extends ScriptTestCase { */ assertThat(e.getMessage(), outOfBoundsExceptionMessageMatcher(index, 5)); } catch (AssertionError ae) { - ae.addSuppressed(e); // Mark the exception we are testing as suppressed so we get its stack trace. + // Mark the exception we are testing as suppressed so we get its stack trace. + ae.addSuppressed(e); throw ae; } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index f129984863ec..d2832dfdfd5e 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -53,7 +53,7 @@ public class NodeToStringTests extends ESTestCase { public void testEAssignment() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SExpression (EAssignment (EVariable i) = (ENumeric 2)))\n" + " (SReturn (EVariable i)))", @@ -62,7 +62,7 @@ public class NodeToStringTests extends ESTestCase { + "return i"); for (String operator : new String[] {"+", "-", "*", "/", "%", "&", "^", "|", "<<", ">>", ">>>"}) { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i (ENumeric 1)))\n" + " (SExpression (EAssignment (EVariable i) " + operator + "= (ENumeric 2)))\n" + " (SReturn (EVariable i)))", @@ -72,31 +72,31 @@ public class NodeToStringTests extends ESTestCase { } // Compound assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) = (ENumeric 2))))", "def i;\n" + "return i = 2"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) ++ post)))", "def i;\n" + "return i++"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) ++ pre)))", "def i;\n" + "return ++i"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) -- post)))", "def i;\n" + "return i--"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def i))\n" + " (SReturn (EAssignment (EVariable i) -- pre)))", "def i;\n" @@ -104,34 +104,34 @@ public class NodeToStringTests extends ESTestCase { } public void testEBinary() { - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) * (ENumeric 1))))", "return 1 * 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) / (ENumeric 1))))", "return 1 / 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) % (ENumeric 1))))", "return 1 % 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) + (ENumeric 1))))", "return 1 + 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) - (ENumeric 1))))", "return 1 - 1"); - 
assertToString( "(SSource (SReturn (EBinary (EString 'asb') =~ (ERegex /cat/))))", "return 'asb' =~ /cat/"); - assertToString("(SSource (SReturn (EBinary (EString 'asb') ==~ (ERegex /cat/))))", "return 'asb' ==~ /cat/"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) << (ENumeric 1))))", "return 1 << 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) >> (ENumeric 1))))", "return 1 >> 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) >>> (ENumeric 1))))", "return 1 >>> 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) & (ENumeric 1))))", "return 1 & 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) ^ (ENumeric 1))))", "return 1 ^ 1"); - assertToString( "(SSource (SReturn (EBinary (ENumeric 1) | (ENumeric 1))))", "return 1 | 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) * (ENumeric 1))))", "return 1 * 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) / (ENumeric 1))))", "return 1 / 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) % (ENumeric 1))))", "return 1 % 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) + (ENumeric 1))))", "return 1 + 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) - (ENumeric 1))))", "return 1 - 1"); + assertToString( "(SClass (SReturn (EBinary (EString 'asb') =~ (ERegex /cat/))))", "return 'asb' =~ /cat/"); + assertToString("(SClass (SReturn (EBinary (EString 'asb') ==~ (ERegex /cat/))))", "return 'asb' ==~ /cat/"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) << (ENumeric 1))))", "return 1 << 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) >> (ENumeric 1))))", "return 1 >> 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) >>> (ENumeric 1))))", "return 1 >>> 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) & (ENumeric 1))))", "return 1 & 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) ^ (ENumeric 1))))", "return 1 ^ 1"); + assertToString( "(SClass (SReturn (EBinary (ENumeric 1) | (ENumeric 1))))", "return 1 | 1"); } public void testEBool() { - assertToString("(SSource (SReturn (EBool (EBoolean true) && (EBoolean false))))", "return true && false"); - assertToString("(SSource (SReturn (EBool (EBoolean true) || (EBoolean false))))", "return true || false"); + assertToString("(SClass (SReturn (EBool (EBoolean true) && (EBoolean false))))", "return true && false"); + assertToString("(SClass (SReturn (EBool (EBoolean true) || (EBoolean false))))", "return true || false"); } public void testEBoolean() { - assertToString("(SSource (SReturn (EBoolean true)))", "return true"); - assertToString("(SSource (SReturn (EBoolean false)))", "return false"); + assertToString("(SClass (SReturn (EBoolean true)))", "return true"); + assertToString("(SClass (SReturn (EBoolean false)))", "return false"); } public void testECallLocal() { assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a\n" + " (SReturn (EBoolean true)))\n" + " (SReturn (ECallLocal a)))", @@ -140,7 +140,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return a()"); assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a (Args (Pair int i) (Pair int j))\n" + " (SReturn (EBoolean true)))\n" + " (SReturn (ECallLocal a (Args (ENumeric 1) (ENumeric 2)))))", @@ -152,7 +152,7 @@ public class NodeToStringTests extends ESTestCase { public void testECapturingFunctionRef() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration Integer x (PCallInvoke (EStatic 
Integer) valueOf (Args (ENumeric 5)))))\n" + " (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args (ECapturingFunctionRef x toString)))))", "Integer x = Integer.valueOf(5);\n" @@ -173,18 +173,18 @@ public class NodeToStringTests extends ESTestCase { } public void testEComp() { - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) < (ENumeric 10))))", "return params.a < 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) <= (ENumeric 10))))", "return params.a <= 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) > (ENumeric 10))))", "return params.a > 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) >= (ENumeric 10))))", "return params.a >= 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) == (ENumeric 10))))", "return params.a == 10"); - assertToString("(SSource (SReturn (EComp (PField (EVariable params) a) === (ENumeric 10))))", "return params.a === 10"); - assertToString( "(SSource (SReturn (EComp (PField (EVariable params) a) != (ENumeric 10))))", "return params.a != 10"); - assertToString("(SSource (SReturn (EComp (PField (EVariable params) a) !== (ENumeric 10))))", "return params.a !== 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) < (ENumeric 10))))", "return params.a < 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) <= (ENumeric 10))))", "return params.a <= 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) > (ENumeric 10))))", "return params.a > 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) >= (ENumeric 10))))", "return params.a >= 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) == (ENumeric 10))))", "return params.a == 10"); + assertToString("(SClass (SReturn (EComp (PField (EVariable params) a) === (ENumeric 10))))", "return params.a === 10"); + assertToString( "(SClass (SReturn (EComp (PField (EVariable params) a) != (ENumeric 10))))", "return params.a != 10"); + assertToString("(SClass (SReturn (EComp (PField (EVariable params) a) !== (ENumeric 10))))", "return params.a !== 10"); } public void testEConditional() { - assertToString("(SSource (SReturn (EConditional (PField (EVariable params) a) (ENumeric 1) (ENumeric 6))))", + assertToString("(SClass (SReturn (EConditional (PField (EVariable params) a) (ENumeric 1) (ENumeric 6))))", "return params.a ? 
1 : 6"); } @@ -196,68 +196,68 @@ public class NodeToStringTests extends ESTestCase { } public void testEDecimal() { - assertToString("(SSource (SReturn (EDecimal 1.0)))", "return 1.0"); - assertToString("(SSource (SReturn (EDecimal 14.121d)))", "return 14.121d"); - assertToString("(SSource (SReturn (EDecimal 2234.1f)))", "return 2234.1f"); - assertToString("(SSource (SReturn (EDecimal 14.121D)))", "return 14.121D"); - assertToString("(SSource (SReturn (EDecimal 1234.1F)))", "return 1234.1F"); + assertToString("(SClass (SReturn (EDecimal 1.0)))", "return 1.0"); + assertToString("(SClass (SReturn (EDecimal 14.121d)))", "return 14.121d"); + assertToString("(SClass (SReturn (EDecimal 2234.1f)))", "return 2234.1f"); + assertToString("(SClass (SReturn (EDecimal 14.121D)))", "return 14.121D"); + assertToString("(SClass (SReturn (EDecimal 1234.1F)))", "return 1234.1F"); } public void testEElvis() { - assertToString("(SSource (SReturn (EElvis (PField (EVariable params) a) (ENumeric 1))))", "return params.a ?: 1"); + assertToString("(SClass (SReturn (EElvis (PField (EVariable params) a) (ENumeric 1))))", "return params.a ?: 1"); } public void testEExplicit() { - assertToString("(SSource (SReturn (EExplicit byte (PField (EVariable params) a))))", "return (byte)(params.a)"); + assertToString("(SClass (SReturn (EExplicit byte (PField (EVariable params) a))))", "return (byte)(params.a)"); } public void testEFunctionRef() { assertToString( - "(SSource (SReturn " + "(SClass (SReturn " + "(PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args (EFunctionRef Optional empty)))))", "return Optional.empty().orElseGet(Optional::empty)"); } public void testEInstanceOf() { - assertToString("(SSource (SReturn (EInstanceof (ENewObj Object) Object)))", "return new Object() instanceof Object"); - assertToString("(SSource (SReturn (EInstanceof (ENumeric 12) double)))", "return 12 instanceof double"); + assertToString("(SClass (SReturn (EInstanceof (ENewObj Object) Object)))", "return new Object() instanceof Object"); + assertToString("(SClass (SReturn (EInstanceof (ENumeric 12) double)))", "return 12 instanceof double"); } public void testELambda() { assertToString( - "(SSource (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args " + "(SClass (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args " + "(ELambda (SReturn (ENumeric 1)))))))", "return Optional.empty().orElseGet(() -> {\n" + " return 1\n" + "})"); assertToString( - "(SSource (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args " + "(SClass (SReturn (PCallInvoke (PCallInvoke (EStatic Optional) empty) orElseGet (Args " + "(ELambda (SReturn (ENumeric 1)))))))", "return Optional.empty().orElseGet(() -> 1)"); assertToString( - "(SSource (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) " + "(SClass (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) " + "mapToInt (Args (ELambda (Pair def x)\n" + " (SReturn (EBinary (EVariable x) + (ENumeric 1)))))) sum)))", "return [1, 2, 3].stream().mapToInt((def x) -> {\n" + " return x + 1\n" + "}).sum()"); assertToString( - "(SSource (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) " + "(SClass (SReturn (PCallInvoke (PCallInvoke (PCallInvoke (EListInit (ENumeric 1) (ENumeric 2) (ENumeric 3)) stream) " + "mapToInt (Args (ELambda (Pair null x)\n" + " (SReturn (EBinary (EVariable 
x) + (ENumeric 1)))))) sum)))", "return [1, 2, 3].stream().mapToInt(x -> x + 1).sum()"); assertToString( - "(SSource (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n" + "(SClass (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n" + " (SReturn (EBinary (PCallInvoke (EVariable a) length) - (PCallInvoke (EVariable b) length))))))))", "return ['a', 'b'].sort((def a, def b) -> {\n" + " return a.length() - b.length()\n" + "})"); assertToString( - "(SSource (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair null a) (Pair null b)\n" + "(SClass (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair null a) (Pair null b)\n" + " (SReturn (EBinary (PCallInvoke (EVariable a) length) - (PCallInvoke (EVariable b) length))))))))", "return ['a', 'b'].sort((a, b) -> a.length() - b.length())"); assertToString( - "(SSource (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n" + "(SClass (SReturn (PCallInvoke (EListInit (EString 'a') (EString 'b')) sort (Args (ELambda (Pair def a) (Pair def b)\n" + " (SIf (EComp (EVariable a) < (EVariable b)) (SBlock " + "(SReturn (EBinary (PCallInvoke (EVariable a) length) - (PCallInvoke (EVariable b) length)))))\n" + " (SReturn (ENumeric 1)))))))", @@ -270,85 +270,85 @@ public class NodeToStringTests extends ESTestCase { } public void testEListInit() { - assertToString("(SSource (SReturn (EListInit (ENumeric 1) (ENumeric 2) (EString 'cat') (EString 'dog') (ENewObj Object))))", + assertToString("(SClass (SReturn (EListInit (ENumeric 1) (ENumeric 2) (EString 'cat') (EString 'dog') (ENewObj Object))))", "return [1, 2, 'cat', 'dog', new Object()]"); - assertToString("(SSource (SReturn (EListInit)))", "return []"); + assertToString("(SClass (SReturn (EListInit)))", "return []"); } public void testEMapInit() { - assertToString("(SSource (SReturn (EMapInit " + assertToString("(SClass (SReturn (EMapInit " + "(Pair (EString 'a') (ENumeric 1)) " + "(Pair (EString 'b') (ENumeric 3)) " + "(Pair (ENumeric 12) (ENewObj Object)))))", "return ['a': 1, 'b': 3, 12: new Object()]"); - assertToString("(SSource (SReturn (EMapInit)))", "return [:]"); + assertToString("(SClass (SReturn (EMapInit)))", "return [:]"); } public void testENewArray() { - assertToString("(SSource (SReturn (ENewArray int[] dims (Args (ENumeric 10)))))", "return new int[10]"); - assertToString("(SSource (SReturn (ENewArray int[][][] dims (Args (ENumeric 10) (ENumeric 4) (ENumeric 5)))))", + assertToString("(SClass (SReturn (ENewArray int[] dims (Args (ENumeric 10)))))", "return new int[10]"); + assertToString("(SClass (SReturn (ENewArray int[][][] dims (Args (ENumeric 10) (ENumeric 4) (ENumeric 5)))))", "return new int[10][4][5]"); - assertToString("(SSource (SReturn (ENewArray int[] init (Args (ENumeric 1) (ENumeric 2) (ENumeric 3)))))", + assertToString("(SClass (SReturn (ENewArray int[] init (Args (ENumeric 1) (ENumeric 2) (ENumeric 3)))))", "return new int[] {1, 2, 3}"); - assertToString("(SSource (SReturn (ENewArray def[] init (Args (ENumeric 1) (ENumeric 2) (EString 'bird')))))", + assertToString("(SClass (SReturn (ENewArray def[] init (Args (ENumeric 1) (ENumeric 2) (EString 'bird')))))", "return new def[] {1, 2, 'bird'}"); } public void testENewObj() { - assertToString("(SSource (SReturn (ENewObj Object)))", "return new Object()"); - assertToString("(SSource (SReturn 
(ENewObj DateTimeException (Args (EString 'test')))))", "return new DateTimeException('test')"); + assertToString("(SClass (SReturn (ENewObj Object)))", "return new Object()"); + assertToString("(SClass (SReturn (ENewObj DateTimeException (Args (EString 'test')))))", "return new DateTimeException('test')"); } public void testENull() { - assertToString("(SSource (SReturn (ENull)))", "return null"); + assertToString("(SClass (SReturn (ENull)))", "return null"); } public void testENumeric() { - assertToString("(SSource (SReturn (ENumeric 1)))", "return 1"); - assertToString("(SSource (SReturn (ENumeric 114121d)))", "return 114121d"); - assertToString("(SSource (SReturn (ENumeric 114134f)))", "return 114134f"); - assertToString("(SSource (SReturn (ENumeric 114121D)))", "return 114121D"); - assertToString("(SSource (SReturn (ENumeric 111234F)))", "return 111234F"); - assertToString("(SSource (SReturn (ENumeric 774121l)))", "return 774121l"); - assertToString("(SSource (SReturn (ENumeric 881234L)))", "return 881234L"); + assertToString("(SClass (SReturn (ENumeric 1)))", "return 1"); + assertToString("(SClass (SReturn (ENumeric 114121d)))", "return 114121d"); + assertToString("(SClass (SReturn (ENumeric 114134f)))", "return 114134f"); + assertToString("(SClass (SReturn (ENumeric 114121D)))", "return 114121D"); + assertToString("(SClass (SReturn (ENumeric 111234F)))", "return 111234F"); + assertToString("(SClass (SReturn (ENumeric 774121l)))", "return 774121l"); + assertToString("(SClass (SReturn (ENumeric 881234L)))", "return 881234L"); - assertToString("(SSource (SReturn (ENumeric 1 16)))", "return 0x1"); - assertToString("(SSource (SReturn (ENumeric 774121l 16)))", "return 0x774121l"); - assertToString("(SSource (SReturn (ENumeric 881234L 16)))", "return 0x881234L"); + assertToString("(SClass (SReturn (ENumeric 1 16)))", "return 0x1"); + assertToString("(SClass (SReturn (ENumeric 774121l 16)))", "return 0x774121l"); + assertToString("(SClass (SReturn (ENumeric 881234L 16)))", "return 0x881234L"); - assertToString("(SSource (SReturn (ENumeric 1 8)))", "return 01"); - assertToString("(SSource (SReturn (ENumeric 774121l 8)))", "return 0774121l"); - assertToString("(SSource (SReturn (ENumeric 441234L 8)))", "return 0441234L"); + assertToString("(SClass (SReturn (ENumeric 1 8)))", "return 01"); + assertToString("(SClass (SReturn (ENumeric 774121l 8)))", "return 0774121l"); + assertToString("(SClass (SReturn (ENumeric 441234L 8)))", "return 0441234L"); } public void testERegex() { - assertToString("(SSource (SReturn (ERegex /foo/)))", "return /foo/"); - assertToString("(SSource (SReturn (ERegex /foo/ cix)))", "return /foo/cix"); - assertToString("(SSource (SReturn (ERegex /foo/ cix)))", "return /foo/xci"); + assertToString("(SClass (SReturn (ERegex /foo/)))", "return /foo/"); + assertToString("(SClass (SReturn (ERegex /foo/ cix)))", "return /foo/cix"); + assertToString("(SClass (SReturn (ERegex /foo/ cix)))", "return /foo/xci"); } public void testEStatic() { - assertToString("(SSource (SReturn (PCallInvoke (EStatic Optional) empty)))", "return Optional.empty()"); + assertToString("(SClass (SReturn (PCallInvoke (EStatic Optional) empty)))", "return Optional.empty()"); } public void testEString() { - assertToString("(SSource (SReturn (EString 'foo')))", "return 'foo'"); - assertToString("(SSource (SReturn (EString ' oo')))", "return ' oo'"); - assertToString("(SSource (SReturn (EString 'fo ')))", "return 'fo '"); - assertToString("(SSource (SReturn (EString ' o ')))", "return ' o '"); + 
assertToString("(SClass (SReturn (EString 'foo')))", "return 'foo'"); + assertToString("(SClass (SReturn (EString ' oo')))", "return ' oo'"); + assertToString("(SClass (SReturn (EString 'fo ')))", "return 'fo '"); + assertToString("(SClass (SReturn (EString ' o ')))", "return ' o '"); } public void testEUnary() { - assertToString("(SSource (SReturn (EUnary ! (EBoolean true))))", "return !true"); - assertToString("(SSource (SReturn (EUnary ~ (ENumeric 1))))", "return ~1"); - assertToString("(SSource (SReturn (EUnary + (ENumeric 1))))", "return +1"); - assertToString("(SSource (SReturn (EUnary - (ENumeric 1))))", "return -(1)"); + assertToString("(SClass (SReturn (EUnary ! (EBoolean true))))", "return !true"); + assertToString("(SClass (SReturn (EUnary ~ (ENumeric 1))))", "return ~1"); + assertToString("(SClass (SReturn (EUnary + (ENumeric 1))))", "return +1"); + assertToString("(SClass (SReturn (EUnary - (ENumeric 1))))", "return -(1)"); } public void testEVariable() { - assertToString("(SSource (SReturn (EVariable params)))", "return params"); + assertToString("(SClass (SReturn (EVariable params)))", "return params"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def a (ENumeric 1)))\n" + " (SReturn (EVariable a)))", "def a = 1;\n" @@ -356,29 +356,29 @@ public class NodeToStringTests extends ESTestCase { } public void testPBrace() { - assertToString("(SSource (SReturn (PBrace (PField (EVariable params) a) (ENumeric 10))))", "return params.a[10]"); - assertToString("(SSource (SReturn (PBrace (EVariable params) (EString 'a'))))", "return params['a']"); + assertToString("(SClass (SReturn (PBrace (PField (EVariable params) a) (ENumeric 10))))", "return params.a[10]"); + assertToString("(SClass (SReturn (PBrace (EVariable params) (EString 'a'))))", "return params['a']"); } public void testPCallInvoke() { - assertToString("(SSource (SReturn (PCallInvoke (EStatic Optional) empty)))", "return Optional.empty()"); - assertToString("(SSource (SReturn (PCallInvoke (EStatic Optional) of (Args (ENumeric 1)))))", "return Optional.of(1)"); - assertToString("(SSource (SReturn (PCallInvoke (EStatic Objects) equals (Args (ENumeric 1) (ENumeric 2)))))", + assertToString("(SClass (SReturn (PCallInvoke (EStatic Optional) empty)))", "return Optional.empty()"); + assertToString("(SClass (SReturn (PCallInvoke (EStatic Optional) of (Args (ENumeric 1)))))", "return Optional.of(1)"); + assertToString("(SClass (SReturn (PCallInvoke (EStatic Objects) equals (Args (ENumeric 1) (ENumeric 2)))))", "return Objects.equals(1, 2)"); - assertToString("(SSource (SReturn (PCallInvoke (EVariable params) equals (Args (ENumeric 1)))))", "return params.equals(1)"); + assertToString("(SClass (SReturn (PCallInvoke (EVariable params) equals (Args (ENumeric 1)))))", "return params.equals(1)"); } public void testPField() { - assertToString("(SSource (SReturn (PField (EVariable params) a)))", "return params.a"); - assertToString("(SSource (SReturn (PField nullSafe (EVariable params) a)))", "return params?.a"); + assertToString("(SClass (SReturn (PField (EVariable params) a)))", "return params.a"); + assertToString("(SClass (SReturn (PField nullSafe (EVariable params) a)))", "return params?.a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int[] a (ENewArray int[] dims (Args (ENumeric 10)))))\n" + " (SReturn (PField (EVariable a) length)))", "int[] a = new int[10];\n" + "return a.length"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration 
org.elasticsearch.painless.FeatureTestObject a" + " (ENewObj org.elasticsearch.painless.FeatureTestObject)))\n" + " (SExpression (EAssignment (PField (EVariable a) x) = (ENumeric 10)))\n" @@ -510,7 +510,7 @@ public class NodeToStringTests extends ESTestCase { public void testSBreak() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int itr (ENumeric 2)))\n" + " (SDeclBlock (SDeclaration int a (ENumeric 1)))\n" + " (SDeclBlock (SDeclaration int b (ENumeric 1)))\n" @@ -538,7 +538,7 @@ public class NodeToStringTests extends ESTestCase { public void testSContinue() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int itr (ENumeric 2)))\n" + " (SDeclBlock (SDeclaration int a (ENumeric 1)))\n" + " (SDeclBlock (SDeclaration int b (ENumeric 1)))\n" @@ -566,7 +566,7 @@ public class NodeToStringTests extends ESTestCase { public void testSDeclBlock() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def a))\n" + " (SExpression (EAssignment (EVariable a) = (ENumeric 10)))\n" + " (SReturn (EVariable a)))", @@ -574,13 +574,13 @@ public class NodeToStringTests extends ESTestCase { + "a = 10;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration def a (ENumeric 10)))\n" + " (SReturn (EVariable a)))", "def a = 10;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock\n" + " (SDeclaration def a)\n" + " (SDeclaration def b)\n" @@ -589,7 +589,7 @@ public class NodeToStringTests extends ESTestCase { "def a, b, c;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock\n" + " (SDeclaration def a (ENumeric 10))\n" + " (SDeclaration def b (ENumeric 20))\n" @@ -598,7 +598,7 @@ public class NodeToStringTests extends ESTestCase { "def a = 10, b = 20, c = 100;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock\n" + " (SDeclaration def a (ENumeric 10))\n" + " (SDeclaration def b)\n" @@ -607,7 +607,7 @@ public class NodeToStringTests extends ESTestCase { "def a = 10, b, c = 100;\n" + "return a"); assertToString( - "(SSource\n" + "(SClass\n" + " (SIf (PField (EVariable params) a) (SBlock\n" + " (SDeclBlock\n" + " (SDeclaration def a (ENumeric 10))\n" @@ -624,7 +624,7 @@ public class NodeToStringTests extends ESTestCase { public void testSDo() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int itr (ENumeric 2)))\n" + " (SDeclBlock (SDeclaration int a (ENumeric 1)))\n" + " (SDeclBlock (SDeclaration int b (ENumeric 1)))\n" @@ -648,7 +648,7 @@ public class NodeToStringTests extends ESTestCase { public void testSEach() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int l (ENumeric 0)))\n" + " (SEach String s (EListInit (EString 'cat') (EString 'dog') (EString 'chicken')) (SBlock " + "(SExpression (EAssignment (EVariable l) += (PCallInvoke (EVariable s) length)))))\n" @@ -659,7 +659,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return l"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int l (ENumeric 0)))\n" + " (SEach String s (EListInit (EString 'cat') (EString 'dog') (EString 'chicken')) (SBlock\n" + " (SDeclBlock (SDeclaration String s2 (EBinary (EString 'dire ') + (EVariable s))))\n" @@ -675,7 +675,7 @@ public class NodeToStringTests extends ESTestCase { public void testSFor() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int sum (ENumeric 0)))\n" + " (SFor\n" + " (SDeclBlock 
(SDeclaration int i (ENumeric 0)))\n" @@ -689,7 +689,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return sum"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int sum (ENumeric 0)))\n" + " (SFor\n" + " (SDeclBlock (SDeclaration int i (ENumeric 0)))\n" @@ -712,12 +712,12 @@ public class NodeToStringTests extends ESTestCase { public void testSIf() { assertToString( - "(SSource (SIf (PField (EVariable param) a) (SBlock (SReturn (EBoolean true)))))", + "(SClass (SIf (PField (EVariable param) a) (SBlock (SReturn (EBoolean true)))))", "if (param.a) {\n" + " return true\n" +"}"); assertToString( - "(SSource (SIf (PField (EVariable param) a) (SBlock\n" + "(SClass (SIf (PField (EVariable param) a) (SBlock\n" + " (SIf (PField (EVariable param) b) (SBlock (SReturn (EBoolean true))))\n" + " (SReturn (EBoolean false)))))", "if (param.a) {\n" @@ -730,7 +730,7 @@ public class NodeToStringTests extends ESTestCase { public void testSIfElse() { assertToString( - "(SSource (SIfElse (PField (EVariable param) a)\n" + "(SClass (SIfElse (PField (EVariable param) a)\n" + " (SBlock (SReturn (EBoolean true)))\n" + " (SBlock (SReturn (EBoolean false)))))", "if (param.a) {\n" @@ -739,7 +739,7 @@ public class NodeToStringTests extends ESTestCase { + " return false\n" + "}"); assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int i (ENumeric 0)))\n" + " (SIfElse (PField (EVariable param) a)\n" + " (SBlock (SIfElse (PField (EVariable param) b)\n" @@ -783,12 +783,12 @@ public class NodeToStringTests extends ESTestCase { } public void testSThrow() { - assertToString("(SSource (SThrow (ENewObj RuntimeException)))", "throw new RuntimeException()"); + assertToString("(SClass (SThrow (ENewObj RuntimeException)))", "throw new RuntimeException()"); } public void testSWhile() { assertToString( - "(SSource\n" + "(SClass\n" + " (SDeclBlock (SDeclaration int i (ENumeric 0)))\n" + " (SWhile (EComp (EVariable i) < (ENumeric 10)) (SBlock (SExpression (EAssignment (EVariable i) ++ post))))\n" + " (SReturn (EVariable i)))", @@ -801,7 +801,7 @@ public class NodeToStringTests extends ESTestCase { public void testSFunction() { assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a\n" + " (SReturn (EBoolean true)))\n" + " (SReturn (EBoolean true)))", @@ -810,7 +810,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return true"); assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a (Args (Pair int i) (Pair int j))\n" + " (SReturn (EBoolean true)))\n" + " (SReturn (EBoolean true)))", @@ -819,7 +819,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return true"); assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a (Args (Pair int i) (Pair int j))\n" + " (SIf (EComp (EVariable i) < (EVariable j)) (SBlock (SReturn (EBoolean true))))\n" + " (SDeclBlock (SDeclaration int k (EBinary (EVariable i) + (EVariable j))))\n" @@ -834,7 +834,7 @@ public class NodeToStringTests extends ESTestCase { + "}\n" + "return true"); assertToString( - "(SSource\n" + "(SClass\n" + " (SFunction def a\n" + " (SReturn (EBoolean true)))\n" + " (SFunction def b\n" @@ -851,7 +851,7 @@ public class NodeToStringTests extends ESTestCase { public void testSTryAndSCatch() { assertToString( - "(SSource (STry (SBlock (SReturn (ENumeric 1)))\n" + "(SClass (STry (SBlock (SReturn (ENumeric 1)))\n" + " (SCatch Exception e (SBlock (SReturn (ENumeric 2))))))", "try {\n" + " return 1\n" @@ -859,7 +859,7 @@ public class NodeToStringTests 
extends ESTestCase {
             + " return 2\n"
             + "}");
         assertToString(
-            "(SSource (STry (SBlock\n"
+            "(SClass (STry (SBlock\n"
             + " (SDeclBlock (SDeclaration int i (ENumeric 1)))\n"
             + " (SReturn (ENumeric 1)))\n"
             + " (SCatch Exception e (SBlock (SReturn (ENumeric 2))))))",
@@ -870,7 +870,7 @@ public class NodeToStringTests extends ESTestCase {
             + " return 2\n"
             + "}");
         assertToString(
-            "(SSource (STry (SBlock (SReturn (ENumeric 1)))\n"
+            "(SClass (STry (SBlock (SReturn (ENumeric 1)))\n"
             + " (SCatch Exception e (SBlock\n"
             + " (SDeclBlock (SDeclaration int i (ENumeric 1)))\n"
             + " (SReturn (ENumeric 2))))))",
@@ -881,7 +881,7 @@ public class NodeToStringTests extends ESTestCase {
             + " return 2\n"
             + "}");
         assertToString(
-            "(SSource (STry (SBlock (SReturn (ENumeric 1)))\n"
+            "(SClass (STry (SBlock (SReturn (ENumeric 1)))\n"
             + " (SCatch NullPointerException e (SBlock (SReturn (ENumeric 2))))\n"
             + " (SCatch Exception e (SBlock (SReturn (ENumeric 3))))))",
             "try {\n"
@@ -905,7 +905,7 @@ public class NodeToStringTests extends ESTestCase {
         assertEquals(expected, walk(code).toString());
     }

-    private SSource walk(String code) {
+    private SClass walk(String code) {
         ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, PainlessTestScript.class);
         CompilerSettings compilerSettings = new CompilerSettings();
         compilerSettings.setRegexesEnabled(true);
diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/RankFeatureQueryBuilderTests.java
index aea37e2a8eeb..0cd048184b96 100644
--- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/RankFeatureQueryBuilderTests.java
+++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/RankFeatureQueryBuilderTests.java
@@ -29,7 +29,6 @@ import org.elasticsearch.index.mapper.MapperExtrasPlugin;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.RankFeatureQueryBuilder.ScoreFunction;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.AbstractQueryTestCase;

 import java.io.IOException;
@@ -91,7 +90,7 @@ public class RankFeatureQueryBuilderTests extends AbstractQueryTestCase<RankFeatureQueryBuilder> {
         Class<?> expectedClass = FeatureField.newSaturationQuery("", "", 1, 1).getClass();
         assertThat(query, either(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(expectedClass)));
     }
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java
index 3d805d44ade4..0b33e5ba724b 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java
@@ -117,7 +117,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
             TopHits topHits = childrenBucket.getAggregations().get("top_comments");
             logger.info("total_hits={}", topHits.getHits().getTotalHits().value);
             for (SearchHit searchHit : topHits.getHits()) {
-                logger.info("hit= {} {} {}", searchHit.getSortValues()[0], searchHit.getType(), searchHit.getId());
+                logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId());
             }
         }
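The parent-join test hunks above and below all follow the same pattern: with mapping types removed, a hit is identified by index and `_id` alone, so `SearchHit.getType()` disappears from log lines and assertions. A minimal sketch of the resulting style (illustrative only; `response`, `logger` and the `"c1"` id are assumed names, not taken verbatim from any hunk):

    // Only the sort value and _id remain worth reporting per hit; there is
    // no per-hit type left to log or assert on once _type is gone.
    for (SearchHit searchHit : response.getHits()) {
        logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId());
    }
    assertThat(response.getHits().getAt(0).getId(), equalTo("c1"));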
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ChildQuerySearchIT.java
index f3ef60ea215f..1d206188faf7 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ChildQuerySearchIT.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ChildQuerySearchIT.java
@@ -564,7 +564,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
         assertHitCount(searchResponse, 1L);
         assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1"));

-        ExplainResponse explainResponse = client().prepareExplain("test", "doc", parentId)
+        ExplainResponse explainResponse = client().prepareExplain("test", parentId)
             .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max))
             .get();
         assertThat(explainResponse.isExists(), equalTo(true));
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java
index efef9d4f34d5..f40b14d5d95a 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java
@@ -53,8 +53,6 @@ import org.elasticsearch.index.query.WrapperQueryBuilder;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.join.ParentJoinPlugin;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.test.AbstractQueryTestCase;
@@ -165,7 +163,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQueryBuilder> {
            Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
             InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
-            final InnerHitsContext innerHitsContext = new InnerHitsContext();
-            for (InnerHitContextBuilder builder : innerHitBuilders.values()) {
-                builder.build(searchContext, innerHitsContext);
-            }
-            assertEquals(1, innerHitsContext.getInnerHits().size());
-            assertTrue(innerHitsContext.getInnerHits().containsKey(queryBuilder.innerHit().getName()));
-            InnerHitsContext.InnerHitSubContext innerHits = innerHitsContext.getInnerHits().get(queryBuilder.innerHit().getName());
-            assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
-            assertEquals(innerHits.sort().sort.getSort().length, 1);
-            assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2);
+            assertTrue(innerHitBuilders.containsKey(queryBuilder.innerHit().getName()));
+            InnerHitContextBuilder innerHits = innerHitBuilders.get(queryBuilder.innerHit().getName());
+            assertEquals(innerHits.innerHitBuilder(), queryBuilder.innerHit());
         }
     }
diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java
index 0133b233b478..b0b07b28ca38 100644
--- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java
+++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java
@@ -40,8 +40,6 @@ import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.index.query.WrapperQueryBuilder;
 import org.elasticsearch.join.ParentJoinPlugin;
 import org.elasticsearch.plugins.Plugin;
-import
org.elasticsearch.search.fetch.subphase.InnerHitsContext; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.AbstractQueryTestCase; @@ -138,7 +136,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase innerHitBuilders = new HashMap<>(); InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders); - final InnerHitsContext innerHitsContext = new InnerHitsContext(); - for (InnerHitContextBuilder builder : innerHitBuilders.values()) { - builder.build(searchContext, innerHitsContext); - } - assertEquals(1, innerHitsContext.getInnerHits().size()); - assertTrue(innerHitsContext.getInnerHits().containsKey(queryBuilder.innerHit().getName())); - InnerHitsContext.InnerHitSubContext innerHits = innerHitsContext.getInnerHits().get(queryBuilder.innerHit().getName()); - assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); - assertEquals(innerHits.sort().sort.getSort().length, 1); - assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2); + assertTrue(innerHitBuilders.containsKey(queryBuilder.innerHit().getName())); + InnerHitContextBuilder innerHits = innerHitBuilders.get(queryBuilder.innerHit().getName()); + assertEquals(innerHits.innerHitBuilder(), queryBuilder.innerHit()); } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java index 89929985ea59..4d8c19a5cfb4 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java @@ -132,9 +132,7 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(innerHits.getTotalHits().value, equalTo(2L)); assertThat(innerHits.getAt(0).getId(), equalTo("c1")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); assertThat(innerHits.getAt(1).getId(), equalTo("c2")); - assertThat(innerHits.getAt(1).getType(), equalTo("doc")); final boolean seqNoAndTerm = randomBoolean(); response = client().prepareSearch("articles") @@ -150,11 +148,8 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(innerHits.getTotalHits().value, equalTo(3L)); assertThat(innerHits.getAt(0).getId(), equalTo("c4")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); assertThat(innerHits.getAt(1).getId(), equalTo("c5")); - assertThat(innerHits.getAt(1).getType(), equalTo("doc")); assertThat(innerHits.getAt(2).getId(), equalTo("c6")); - assertThat(innerHits.getAt(2).getType(), equalTo("doc")); if (seqNoAndTerm) { assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(1L)); @@ -248,7 +243,6 @@ public class InnerHitsIT extends ParentChildTestCase { int offset2 = 0; for (int parent = 0; parent < numDocs; parent++) { SearchHit searchHit = searchResponse.getHits().getAt(parent); - assertThat(searchHit.getType(), equalTo("doc")); assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "p_%03d", parent))); assertThat(searchHit.getShard(), notNullValue()); @@ -256,7 +250,6 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent])); for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); - assertThat(innerHit.getType(), equalTo("doc")); String childId = 
String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); assertThat(innerHit.getId(), equalTo(childId)); assertThat(innerHit.getNestedIdentity(), nullValue()); @@ -267,7 +260,6 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent])); for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); - assertThat(innerHit.getType(), equalTo("doc")); String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child); assertThat(innerHit.getId(), equalTo(childId)); assertThat(innerHit.getNestedIdentity(), nullValue()); @@ -302,16 +294,12 @@ public class InnerHitsIT extends ParentChildTestCase { SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.getId(), equalTo("3")); - assertThat(searchHit.getType(), equalTo("doc")); assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); - assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc")); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); searchHit = response.getHits().getAt(1); assertThat(searchHit.getId(), equalTo("4")); - assertThat(searchHit.getType(), equalTo("doc")); assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); - assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc")); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); } @@ -344,12 +332,10 @@ public class InnerHitsIT extends ParentChildTestCase { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); assertThat(innerHits.getTotalHits().value, equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("3")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); assertThat(innerHits.getTotalHits().value, equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("5")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); response = client().prepareSearch("articles") .setQuery(hasChildQuery("comment", @@ -365,12 +351,10 @@ public class InnerHitsIT extends ParentChildTestCase { innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); assertThat(innerHits.getTotalHits().value, equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("4")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); assertThat(innerHits.getTotalHits().value, equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("6")); - assertThat(innerHits.getAt(0).getType(), equalTo("doc")); } public void testRoyals() throws Exception { @@ -502,7 +486,7 @@ public class InnerHitsIT extends ParentChildTestCase { assertHitCount(response, 1); } - public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception { + public void testNestedInnerHitWrappedInParentChildInnerhit() { assertAcked(prepareCreate("test") .addMapping("doc", addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), "nested_type", "nested"))); @@ -521,7 +505,7 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue()); } - public void testInnerHitsWithIgnoreUnmapped() throws Exception { + public void 
testInnerHitsWithIgnoreUnmapped() { assertAcked(prepareCreate("index1") .addMapping("doc", addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), @@ -545,7 +529,7 @@ public class InnerHitsIT extends ParentChildTestCase { assertSearchHits(response, "1", "3"); } - public void testTooHighResultWindow() throws Exception { + public void testTooHighResultWindow() { assertAcked(prepareCreate("index1") .addMapping("doc", addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java index 83441ef92d2b..f43214515be0 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java @@ -32,10 +32,10 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.hamcrest.Matchers; @@ -111,7 +111,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase terms(int[] intervals, String... values) { - Set queryExtractions = new HashSet<>(); - for (int interval : intervals) { - byte[] encodedInterval = new byte[4]; - IntPoint.encodeDimension(interval, encodedInterval, 0); - queryExtractions.add(new QueryAnalyzer.QueryExtraction(new QueryAnalyzer.Range("_field", null, null, encodedInterval))); - } - for (String value : values) { - queryExtractions.add(new QueryExtraction(new Term("_field", value))); - } - return queryExtractions; - } - } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index 30c4f037b2a5..aec3abbaf204 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -31,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchShardTarget; @@ 
-74,7 +72,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase { SearchHit[] hits = new SearchHit[6]; for (int i = 0; i < 6; i++) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); - hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -124,7 +122,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } } - hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -181,7 +179,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase { // only create four hits SearchHit[] hits = new SearchHit[4]; for (int i = 0; i < 4; i++) { - hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java index 7a3b9e3061a2..4d42074dca5a 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -30,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchShardTarget; @@ -117,7 +115,7 @@ public class ExpectedReciprocalRankTests extends ESTestCase { if (relevanceRatings[i] != null) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } - hits[i] = new SearchHit(i, Integer.toString(i), new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } return 
hits; diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java index 6b37fee3c5e8..1c1b36a84609 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -204,7 +203,7 @@ public class MeanReciprocalRankTests extends ESTestCase { private static SearchHit[] createSearchHits(int from, int to, String index) { SearchHit[] hits = new SearchHit[to + 1 - from]; for (int i = from; i <= to; i++) { - hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java index 1d6fdcf4e862..aa5be25f86ae 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -30,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchShardTarget; @@ -112,7 +110,7 @@ public class PrecisionAtKTests extends ESTestCase { rated.add(createRatedDoc("test", "1", RELEVANT_RATING_1)); // add an unlabeled search hit SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3); - searchHits[2] = new SearchHit(2, "2", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + searchHits[2] = new SearchHit(2, "2", Collections.emptyMap()); searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated); @@ -131,7 +129,7 @@ public class PrecisionAtKTests extends ESTestCase { public void testNoRatedDocs() throws Exception { SearchHit[] hits = new SearchHit[5]; for (int i = 0; i < 5; i++) { - hits[i] = new SearchHit(i, i + "", new 
Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); @@ -253,7 +251,7 @@ public class PrecisionAtKTests extends ESTestCase { private static SearchHit[] toSearchHits(List rated, String index) { SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { - hits[i] = new SearchHit(i, i + "", new Text(""), Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 649db936d4fb..9abefd447e79 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -31,20 +31,17 @@ import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TestSearchContext; import java.io.IOException; import java.util.ArrayList; @@ -58,6 +55,7 @@ import java.util.function.Predicate; import static java.util.Collections.singleton; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.TestSearchContext.SHARD_TARGET; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.Matchers.instanceOf; @@ -67,7 +65,7 @@ public class RankEvalResponseTests extends ESTestCase { private static final Exception[] RANDOM_EXCEPTIONS = new Exception[] { new ClusterBlockException(singleton(NoMasterBlockService.NO_MASTER_BLOCK_WRITES)), new CircuitBreakingException("Data too large", 123, 456, CircuitBreaker.Durability.PERMANENT), - new SearchParseException(new TestSearchContext(null), "Parse failure", new XContentLocation(12, 98)), + new SearchParseException(SHARD_TARGET, "Parse failure", new XContentLocation(12, 98)), new IllegalArgumentException("Closed resource", new RuntimeException("Resource")), new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] { new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), @@ -161,9 +159,9 @@ public 
class RankEvalResponseTests extends ESTestCase { " \"coffee_query\": {" + " \"metric_score\": 0.1," + " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," + - " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"123\",\"_score\":1.0}," + + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_id\":\"123\",\"_score\":1.0}," + " \"rating\":5}," + - " {\"hit\":{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"456\",\"_score\":1.0}," + + " {\"hit\":{\"_index\":\"index\",\"_id\":\"456\",\"_score\":1.0}," + " \"rating\":null}" + " ]" + " }" + @@ -181,7 +179,7 @@ public class RankEvalResponseTests extends ESTestCase { } private static RatedSearchHit searchHit(String index, int docId, Integer rating) { - SearchHit hit = new SearchHit(docId, docId + "", new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap()); hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); hit.score(1.0f); return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty()); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java index 384503a28016..a83cff03b3bb 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java @@ -21,11 +21,9 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESTestCase; @@ -41,8 +39,7 @@ public class RatedSearchHitTests extends ESTestCase { public static RatedSearchHit randomRatedSearchHit() { OptionalInt rating = randomBoolean() ? OptionalInt.empty() : OptionalInt.of(randomIntBetween(0, 5)); - SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10), - new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10), Collections.emptyMap()); RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating); return ratedSearchHit; } @@ -55,8 +52,7 @@ public class RatedSearchHitTests extends ESTestCase { rating = rating.isPresent() ? 
OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5)); break; case 1: - hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), - new Text(MapperService.SINGLE_MAPPING_NAME), Collections.emptyMap()); + hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap()); break; default: throw new IllegalStateException("The test should only allow two parameters mutated"); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index aeca7caf2d44..2839f8a822cd 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.script.Script; @@ -195,7 +194,7 @@ public abstract class AbstractAsyncBulkByScrollAction context = new HashMap<>(); context.put(IndexFieldMapper.NAME, doc.getIndex()); - context.put(TypeFieldMapper.NAME, doc.getType()); context.put(IdFieldMapper.NAME, doc.getId()); Long oldVersion = doc.getVersion(); context.put(VersionFieldMapper.NAME, oldVersion); @@ -789,10 +763,6 @@ public abstract class AbstractAsyncBulkByScrollAction delete = wrap(new DeleteRequest(request.getIndex(), request.getType(), request.getId())); + RequestWrapper delete = wrap(new DeleteRequest(request.getIndex(), request.getId())); delete.setVersion(request.getVersion()); delete.setVersionType(VersionType.INTERNAL); delete.setRouting(request.getRouting()); @@ -839,8 +809,6 @@ public abstract class AbstractAsyncBulkByScrollAction request, Object to); - protected abstract void scriptChangedType(RequestWrapper request, Object to); - protected abstract void scriptChangedId(RequestWrapper request, Object to); protected abstract void scriptChangedVersion(RequestWrapper request, Object to); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AsyncDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AsyncDeleteByQueryAction.java index 7fd18306ddc4..e255b1703ae3 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AsyncDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AsyncDeleteByQueryAction.java @@ -48,7 +48,6 @@ public class AsyncDeleteByQueryAction extends AbstractAsyncBulkByScrollAction buildRequest(ScrollableHitSource.Hit doc) { DeleteRequest delete = new DeleteRequest(); delete.index(doc.getIndex()); - delete.type(doc.getType()); delete.id(doc.getId()); delete.setIfSeqNo(doc.getSeqNo()); delete.setIfPrimaryTerm(doc.getPrimaryTerm()); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/Reindexer.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/Reindexer.java index 695b659443d9..39879845bdca 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/Reindexer.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/Reindexer.java @@ -224,13 +224,6 @@ public class Reindexer { // Copy the index from the request so we always write where it asked to write index.index(mainRequest.getDestination().index()); - // If the request override's type then the user wants all documents in that type. Otherwise keep the doc's type. - if (mainRequest.getDestination().type() == null) { - index.type(doc.getType()); - } else { - index.type(mainRequest.getDestination().type()); - } - /* * Internal versioning can just use what we copied from the destination request. Otherwise we assume we're using external * versioning and use the doc's version. @@ -322,12 +315,6 @@ public class Reindexer { request.setIndex(to.toString()); } - @Override - protected void scriptChangedType(RequestWrapper request, Object to) { - requireNonNull(to, "Can't reindex without a destination type!"); - request.setType(to.toString()); - } - @Override protected void scriptChangedId(RequestWrapper request, Object to) { request.setId(Objects.toString(to, null)); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index b98b20e390e5..a645a3222ae0 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; @@ -103,7 +102,6 @@ public class TransportUpdateByQueryAction extends HandledTransportAction buildRequest(ScrollableHitSource.Hit doc) { IndexRequest index = new IndexRequest(); index.index(doc.getIndex()); - index.type(doc.getType()); index.id(doc.getId()); index.source(doc.getSource(), doc.getXContentType()); index.setIfSeqNo(doc.getSeqNo()); @@ -124,11 +122,6 @@ public class TransportUpdateByQueryAction extends HandledTransportAction request, Object to) { - throw new IllegalArgumentException("Modifying [" + TypeFieldMapper.NAME + "] not allowed"); - } - @Override protected void scriptChangedId(RequestWrapper request, Object to) { throw new IllegalArgumentException("Modifying [" + IdFieldMapper.NAME + "] not allowed"); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java index 40c4ba757d17..d474e39b7472 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java @@ -176,15 +176,6 @@ final class RemoteRequestBuilders { } } - private static void checkIndexOrType(String name, String indexOrType) { - if (indexOrType.indexOf(',') >= 0) { - throw new IllegalArgumentException(name + " containing [,] not supported but got [" + indexOrType + "]"); - } - if (indexOrType.indexOf('/') >= 0) { - throw new IllegalArgumentException(name + " containing [/] not supported but got [" + indexOrType + 
"]"); - } - } - private static String sortToUri(SortBuilder sort) { if (sort instanceof FieldSortBuilder) { FieldSortBuilder f = (FieldSortBuilder) sort; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java index 664941127b78..1029b76d0f94 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java @@ -62,14 +62,12 @@ final class RemoteResponseParsers { new ConstructingObjectParser<>("hit", true, a -> { int i = 0; String index = (String) a[i++]; - String type = (String) a[i++]; String id = (String) a[i++]; Long version = (Long) a[i++]; - return new BasicHit(index, type, id, version == null ? -1 : version); + return new BasicHit(index, id, version == null ? -1 : version); }); static { HIT_PARSER.declareString(constructorArg(), new ParseField("_index")); - HIT_PARSER.declareString(constructorArg(), new ParseField("_type")); HIT_PARSER.declareString(constructorArg(), new ParseField("_id")); HIT_PARSER.declareLong(optionalConstructorArg(), new ParseField("_version")); HIT_PARSER.declareObject(((basicHit, tuple) -> basicHit.setSource(tuple.v1(), tuple.v2())), (p, s) -> { diff --git a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java index 32f35d29a9e4..f89181c11afe 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -195,7 +195,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase { } } - public void testTasks() throws InterruptedException { + public void testTasks() throws Exception { final Client client = client(); final ReindexRequestBuilder builder = reindexAndPartiallyBlock(); @@ -279,7 +279,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase { * Similar to what CancelTests does: blocks some operations to be able to catch some tasks in running state * @see CancelTests#testCancel(String, AbstractBulkByScrollRequestBuilder, CancelTests.CancelAssertion, Matcher) */ - private ReindexRequestBuilder reindexAndPartiallyBlock() throws InterruptedException { + private ReindexRequestBuilder reindexAndPartiallyBlock() throws Exception { final Client client = client(); final int numDocs = randomIntBetween(10, 100); ALLOWED_OPERATIONS.release(numDocs); @@ -305,9 +305,12 @@ public class ReindexDocumentationIT extends ESIntegTestCase { builder.execute(); // 10 seconds is usually fine but on heavily loaded machines this can take a while - assertTrue("updates blocked", awaitBusy( - () -> ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0, - 1, TimeUnit.MINUTES)); + assertBusy( + () -> { + assertTrue("Expected some queued threads", ALLOWED_OPERATIONS.hasQueuedThreads()); + assertEquals("Expected that no permits are available", 0, ALLOWED_OPERATIONS.availablePermits()); + }, + 1, TimeUnit.MINUTES); return builder; } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java
index 8d67a3bd6760..439c51c1a471 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionMetadataTestCase.java
@@ -25,7 +25,7 @@ public abstract class AbstractAsyncBulkByScrollActionMetadataTestCase<
        extends AbstractAsyncBulkByScrollActionTestCase<Request, Response> {

     protected ScrollableHitSource.BasicHit doc() {
-        return new ScrollableHitSource.BasicHit("index", "type", "id", 0);
+        return new ScrollableHitSource.BasicHit("index", "id", 0);
     }

     protected abstract AbstractAsyncBulkByScrollAction<Request> action();
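The fixture changes in these reindex test hunks are the same mechanical typeless migration seen throughout the patch. Condensed from the hunks immediately above and below (the `"index"`, `"test"`, `"id"` and `"1"` values are the tests' own placeholders):

    // Before: hits and index requests carried an explicit mapping type.
    new ScrollableHitSource.BasicHit("index", "type", "id", 0);
    new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar"));

    // After: index plus _id identify a document; the id moves to a builder call.
    new ScrollableHitSource.BasicHit("index", "id", 0);
    new IndexRequest("index").id("1").source(singletonMap("foo", "bar"));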
- public void testTypeDeprecation() { - applyScript((Map<String, Object> ctx) -> ctx.get("_type")); - assertWarnings("[types removal] Looking up doc types [_type] in scripts is deprecated."); - } - public void testScriptAddingJunkToCtxIsError() { try { applyScript((Map<String, Object> ctx) -> ctx.put("junk", "junk")); @@ -90,16 +85,9 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase< assertEquals("cat", index.sourceAsMap().get("bar")); } - public void testSetOpTypeNoop() throws Exception { - assertThat(task.getStatus().getNoops(), equalTo(0L)); - assertNull(applyScript((Map<String, Object> ctx) -> ctx.put("op", OpType.NOOP.toString()))); - assertThat(task.getStatus().getNoops(), equalTo(1L)); - } - public void testSetOpTypeDelete() throws Exception { DeleteRequest delete = applyScript((Map<String, Object> ctx) -> ctx.put("op", OpType.DELETE.toString())); assertThat(delete.index(), equalTo("index")); - assertThat(delete.type(), equalTo("type")); assertThat(delete.id(), equalTo("1")); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 1e9aacb5afa6..d4b154fb97d3 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -23,10 +23,10 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteResponse.Result; @@ -60,7 +60,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -266,7 +265,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { public void testScrollResponseBatchingBehavior() throws Exception { int maxBatches = randomIntBetween(0, 100); for (int batches = 1; batches < maxBatches; batches++) { - Hit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0); + Hit hit = new ScrollableHitSource.BasicHit("index", "id", 0); ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null); DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction(); simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 0, response); @@ -421,7 +420,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { throw new RuntimeException("surprise"); } }; - ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0); + ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "id", 0); hit.setSource(new BytesArray("{}"), XContentType.JSON); ScrollableHitSource.Response response = new
ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null); simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 0, response); @@ -505,7 +504,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { action.start(); // create a simulated response. - SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap()).sourceRef(new BytesArray("{}")); + SearchHit hit = new SearchHit(0, "id", emptyMap()).sourceRef(new BytesArray("{}")); SearchHits hits = new SearchHits(IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO),0); InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java index 7eaa989b88a3..6e6ca81fdfc1 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java @@ -62,7 +62,6 @@ import static org.hamcrest.Matchers.hasSize; public class CancelTests extends ReindexTestCase { protected static final String INDEX = "reindex-cancel-index"; - protected static final String TYPE = "reindex-cancel-type"; // Semaphore used to allow & block indexing operations during the test private static final Semaphore ALLOWED_OPERATIONS = new Semaphore(0); @@ -93,7 +92,7 @@ public class CancelTests extends ReindexTestCase { logger.debug("setting up [{}] docs", numDocs); indexRandom(true, false, true, IntStream.range(0, numDocs) - .mapToObj(i -> client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i)) + .mapToObj(i -> client().prepareIndex(INDEX, "_doc", String.valueOf(i)).setSource("n", i)) .collect(Collectors.toList())); // Checks that the all documents have been indexed and correctly counted @@ -118,10 +117,9 @@ public class CancelTests extends ReindexTestCase { * exhausted their slice while others might have quite a bit left * to work on. We can't control that. 
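These reindex test files replace `awaitBusy`, which polls a boolean and leaves the caller to assert on the result, with `assertBusy`, which retries a block of assertions until it passes or the timeout expires and then rethrows the last `AssertionError`, so a timeout names the exact condition that never became true. The pattern, as used in these hunks (assuming the `ALLOWED_OPERATIONS` semaphore from the surrounding tests):

    // Retries until both assertions pass, for at most one minute; on timeout
    // the last assertion failure is what the test reports.
    assertBusy(() -> {
        assertTrue("Expected some queued threads", ALLOWED_OPERATIONS.hasQueuedThreads());
        assertEquals("Expected that no permits are available", 0, ALLOWED_OPERATIONS.availablePermits());
    }, 1, TimeUnit.MINUTES);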
*/ logger.debug("waiting for updates to be blocked"); - boolean blocked = awaitBusy( - () -> ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0, + assertBusy( + () -> assertTrue("updates blocked", ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0), 1, TimeUnit.MINUTES); // 10 seconds is usually fine but on heavily loaded machines this can take a while - assertTrue("updates blocked", blocked); // Status should show the task running TaskInfo mainTask = findTaskToCancel(action, builder.request().getSlices()); @@ -210,12 +208,12 @@ } public void testReindexCancel() throws Exception { - testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest", TYPE), (response, total, modified) -> { + testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest", "_doc"), (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request"))); refresh("dest"); assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified); - }, equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]")); + }, equalTo("reindex from [" + INDEX + "] to [dest][_doc]")); } public void testUpdateByQueryCancel() throws Exception { @@ -245,13 +243,13 @@ public void testReindexCancelWithWorkers() throws Exception { testCancel(ReindexAction.NAME, - reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest", TYPE).setSlices(5), + reindex().source(INDEX).filter(QueryBuilders.matchAllQuery()).destination("dest", "_doc").setSlices(5), (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); refresh("dest"); assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified); }, - equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]")); + equalTo("reindex from [" + INDEX + "] to [dest][" + "_doc" + "]")); } public void testUpdateByQueryCancelWithWorkers() throws Exception { @@ -309,7 +307,7 @@ } private <T extends Engine.Operation> T preCheck(T operation, String type) { - if ((TYPE.equals(type) == false) || (operation.origin() != Origin.PRIMARY)) { + if ((operation.origin() != Origin.PRIMARY)) { return operation; } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java index 37425a7c600e..a6f3d4f28bdb 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.search.SearchHit; @@ -160,7 +159,7 @@ public class ClientScrollableHitSourceTests extends ESTestCase { private SearchResponse createSearchResponse() { // create a simulated response.
- SearchHit hit = new SearchHit(0, "id", new Text("type"), emptyMap()).sourceRef(new BytesArray("{}")); + SearchHit hit = new SearchHit(0, "id", emptyMap()).sourceRef(new BytesArray("{}")); SearchHits hits = new SearchHits(IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO),0); InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index 54a2ea96b25f..44f47cdfba58 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -47,20 +47,6 @@ public class ReindexScriptTests extends AbstractAsyncBulkByScrollActionScriptTes } } - public void testSetType() throws Exception { - Object type = randomFrom(new Object[] {234, 234L, "pancake"}); - IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_type", type)); - assertEquals(type.toString(), index.type()); - } - - public void testSettingTypeToNullIsError() throws Exception { - try { - applyScript((Map<String, Object> ctx) -> ctx.put("_type", null)); - } catch (NullPointerException e) { - assertThat(e.getMessage(), containsString("Can't reindex without a destination type!")); - } - } - public void testSetId() throws Exception { Object id = randomFrom(new Object[] {null, 234, 234L, "pancake"}); IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_id", id)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index 607969a6ff2c..b1c13e489319 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -42,7 +42,7 @@ public class UpdateByQueryWithScriptTests * error message to the user, not some ClassCastException. */
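The comment above concerns scripts that assign bogus values to metadata fields in the `ctx` map: update-by-query should answer with a clear `IllegalArgumentException` rather than leak a `ClassCastException`. A hedged sketch of the kind of check involved (names are illustrative, not the production code):

    import java.util.Map;

    final class CtxMetadataSketch {
        // Reads a metadata field such as "_index" or "_id" from the script's
        // ctx map and rejects anything that is not a string with a clear error.
        static String extractString(Map<String, Object> ctx, String field) {
            Object value = ctx.get(field);
            if (value == null || value instanceof String) {
                return (String) value;
            }
            throw new IllegalArgumentException(field + " may only be set to a string, was: [" + value + "]");
        }
    }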
Object[] options = new Object[] {"cat", new Object(), 123, new Date(), Math.PI}; - for (String ctxVar: new String[] {"_index", "_type", "_id", "_version", "_routing"}) { + for (String ctxVar: new String[] {"_index", "_id", "_version", "_routing"}) { try { applyScript((Map<String, Object> ctx) -> ctx.put(ctxVar, randomFrom(options))); } catch (IllegalArgumentException e) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index f9e952baa127..d08b6494d4a2 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -165,7 +165,6 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertThat(r.getFailures(), empty()); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); @@ -183,7 +182,6 @@ assertThat(r.getFailures(), empty()); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); @@ -233,7 +231,6 @@ assertThat(r.getFailures(), empty()); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); assertNull(r.getHits().get(0).getRouting()); @@ -261,7 +258,6 @@ + "completed tasks = 4182]]", r.getFailures().get(0).getReason().getMessage()); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test1\"}", r.getHits().get(0).getSource().utf8ToString()); called.set(true); @@ -290,7 +286,6 @@ r.getFailures().get(0).getReason().getMessage()); assertThat(r.getHits(), hasSize(1)); assertEquals("test", r.getHits().get(0).getIndex()); - assertEquals("test", r.getHits().get(0).getType()); assertEquals("10000", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test10000\"}", r.getHits().get(0).getSource().utf8ToString()); called.set(true); diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yml index 44971c49c358..0567eab51526 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yml @@ -246,7
+246,7 @@ indices.refresh: {} - do: - catch: /\[test\]\[_doc\]\[1\] didn't store _source/ + catch: /\[test\]\[1\] didn't store _source/ reindex: body: source: diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yml index 21644b393298..f4f454c8f049 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yml @@ -98,7 +98,7 @@ indices.refresh: {} - do: - catch: /\[test\]\[_doc\]\[1\] didn't store _source/ + catch: /\[test\]\[1\] didn't store _source/ update_by_query: index: test diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java index 3f7f8d7739de..aee47759f32f 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -73,22 +73,6 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase { headTestCase("/test", singletonMap("pretty", "true"), greaterThan(0)); } - public void testTypeExists() throws IOException { - createTestDoc(); - headTestCase("/test/_mapping/_doc", emptyMap(), OK.getStatus(), greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated."); - headTestCase("/test/_mapping/_doc", singletonMap("pretty", "true"), OK.getStatus(), greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated."); - } - - public void testTypeDoesNotExist() throws IOException { - createTestDoc(); - headTestCase("/test/_mapping/does-not-exist", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated."); - headTestCase("/text/_mapping/test,does-not-exist", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0), - "Type exists requests are deprecated, as types have been deprecated."); - } - public void testAliasExists() throws IOException { createTestDoc(); try (XContentBuilder builder = jsonBuilder()) { diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java index 31484c8ade76..a9f9dc3ca4b9 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java @@ -22,9 +22,7 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.FilteredNormalizer2; import com.ibm.icu.text.Normalizer2; import com.ibm.icu.text.UnicodeSet; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -37,9 +35,6 @@ import org.elasticsearch.index.IndexSettings; */ public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory implements NormalizingTokenFilterFactory { - private static final DeprecationLogger deprecationLogger = - new 
DeprecationLogger(LogManager.getLogger(IcuNormalizerTokenFilterFactory.class)); - private final Normalizer2 normalizer; public IcuNormalizerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java index ca0f315e7a4b..094691acb55a 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java @@ -30,13 +30,11 @@ import org.apache.commons.codec.language.bm.Languages.LanguageSet; import org.apache.commons.codec.language.bm.NameType; import org.apache.commons.codec.language.bm.PhoneticEngine; import org.apache.commons.codec.language.bm.RuleType; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.phonetic.BeiderMorseFilter; import org.apache.lucene.analysis.phonetic.DaitchMokotoffSoundexFilter; import org.apache.lucene.analysis.phonetic.DoubleMetaphoneFilter; import org.apache.lucene.analysis.phonetic.PhoneticFilter; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -49,10 +47,6 @@ import java.util.List; public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory { - - private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(LogManager.getLogger(PhoneticTokenFilterFactory.class)); - private final Encoder encoder; private final boolean replace; private int maxcodelength; diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 6935de286b23..e9a2d12b8de9 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -23,7 +23,7 @@ esplugin { } versions << [ - 'aws': '1.11.562' + 'aws': '1.11.636' ] dependencies { diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.562.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.562.jar.sha1 deleted file mode 100644 index ed8ded6a3608..000000000000 --- a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b5fc47ec1b5afe180f5ebb4eda755acdca7a20ae \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.636.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.636.jar.sha1 new file mode 100644 index 000000000000..b9ee9c102dbc --- /dev/null +++ b/plugins/discovery-ec2/licenses/aws-java-sdk-core-1.11.636.jar.sha1 @@ -0,0 +1 @@ +84c9f180f8f60f6f1433c9c5253fcb704593b121 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.562.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.562.jar.sha1 deleted file mode 100644 index 040d28de70b2..000000000000 --- a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0211a055fb3e036033af4b1ca25ada0574a756ec \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.636.jar.sha1 b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.636.jar.sha1 new file mode 100644 index 000000000000..ed737c808c1d --- /dev/null +++ 
b/plugins/discovery-ec2/licenses/aws-java-sdk-ec2-1.11.636.jar.sha1 @@ -0,0 +1 @@ +d32fc4ae314dbee9717302a3119cba0f735c04b1 \ No newline at end of file diff --git a/plugins/repository-azure/qa/microsoft-azure-storage/src/test/resources/rest-api-spec/test/repository_azure/10_repository.yml b/plugins/repository-azure/qa/microsoft-azure-storage/src/test/resources/rest-api-spec/test/repository_azure/10_repository.yml index fade1f9f1e67..095581807db1 100644 --- a/plugins/repository-azure/qa/microsoft-azure-storage/src/test/resources/rest-api-spec/test/repository_azure/10_repository.yml +++ b/plugins/repository-azure/qa/microsoft-azure-storage/src/test/resources/rest-api-spec/test/repository_azure/10_repository.yml @@ -16,8 +16,12 @@ setup: --- "Snapshot/Restore with repository-azure": - skip: - version: " - 7.9.99" - reason: "8.0 changes get snapshots response format" + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/47201" +# BELOW 3 lines to be uncommented when un-muting +# - skip: +# version: " - 7.9.99" +# reason: "8.0 changes get snapshots response format" # Get repository - do: diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java index 600715209f52..5162f1a22369 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java @@ -59,7 +59,7 @@ final class AzureStorageSettings { key -> SecureSetting.secureString(key, null)); /** max_retries: Number of retries in case of Azure errors. Defaults to 3 (RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT). */ - public static final Setting MAX_RETRIES_SETTING = + public static final AffixSetting MAX_RETRIES_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "max_retries", (key) -> Setting.intSetting(key, RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT, Setting.Property.NodeScope), ACCOUNT_SETTING, KEY_SETTING); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java new file mode 100644 index 000000000000..0aa7a3b0922f --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -0,0 +1,311 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.repositories.azure; + +import com.microsoft.azure.storage.Constants; +import com.microsoft.azure.storage.RetryExponentialRetry; +import com.microsoft.azure.storage.RetryPolicyFactory; +import com.microsoft.azure.storage.blob.BlobRequestOptions; +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; +import org.elasticsearch.common.lucene.store.InputStreamIndexInput; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.mocksocket.MockHttpServer; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.io.ByteArrayOutputStream; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.nio.file.NoSuchFileException; +import java.util.Arrays; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; +import static org.elasticsearch.repositories.azure.AzureRepository.Repository.CONTAINER_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.ACCOUNT_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.ENDPOINT_SUFFIX_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.KEY_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.MAX_RETRIES_SETTING; +import static org.elasticsearch.repositories.azure.AzureStorageSettings.TIMEOUT_SETTING; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; + +/** + * This class tests how a {@link AzureBlobContainer} and its underlying SDK client are retrying requests when reading or writing blobs. 
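The test introduced here is built around failure-injecting HTTP handlers: requests are answered with transient errors until a `CountDown` is exhausted, after which the real response is served, so a client configured with that many retries has to ride out every injected failure to pass. A condensed sketch of the idea (simplified; the real handlers below also validate methods, headers, and bodies):

    import java.net.InetSocketAddress;

    import com.sun.net.httpserver.HttpServer;
    import org.elasticsearch.common.util.concurrent.CountDown;

    public class RetryHarnessSketch {
        public static void main(String[] args) throws Exception {
            final int maxRetries = 3;
            final CountDown countDown = new CountDown(maxRetries);
            HttpServer server = HttpServer.create(new InetSocketAddress(0), 0);
            server.createContext("/blob", exchange -> {
                if (countDown.countDown()) {
                    // countDown() returns true on the call that takes the count
                    // to zero: only then do we serve a successful, empty response.
                    exchange.sendResponseHeaders(200, -1);
                } else {
                    exchange.sendResponseHeaders(500, -1); // transient error, should be retried
                }
                exchange.close();
            });
            server.start();
            // ... point an SDK client configured with maxRetries at server.getAddress() ...
            server.stop(0);
        }
    }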
+ */ +@SuppressForbidden(reason = "use a http server") +public class AzureBlobContainerRetriesTests extends ESTestCase { + + private HttpServer httpServer; + private ThreadPool threadPool; + + @Before + public void setUp() throws Exception { + threadPool = new TestThreadPool(getTestClass().getName(), AzureRepositoryPlugin.executorBuilder()); + httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + httpServer.start(); + super.setUp(); + } + + @After + public void tearDown() throws Exception { + httpServer.stop(0); + super.tearDown(); + ThreadPool.terminate(threadPool, 10L, TimeUnit.SECONDS); + } + + private BlobContainer createBlobContainer(final int maxRetries) { + final Settings.Builder clientSettings = Settings.builder(); + final String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + + final InetSocketAddress address = httpServer.getAddress(); + final String endpoint = "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=http://" + + InetAddresses.toUriString(address.getAddress()) + ":" + address.getPort(); + clientSettings.put(ENDPOINT_SUFFIX_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint); + clientSettings.put(MAX_RETRIES_SETTING.getConcreteSettingForNamespace(clientName).getKey(), maxRetries); + clientSettings.put(TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), TimeValue.timeValueMillis(500)); + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ACCOUNT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), "account"); + final String key = Base64.getEncoder().encodeToString(randomAlphaOfLength(10).getBytes(UTF_8)); + secureSettings.setString(KEY_SETTING.getConcreteSettingForNamespace(clientName).getKey(), key); + clientSettings.setSecureSettings(secureSettings); + + final AzureStorageService service = new AzureStorageService(clientSettings.build()) { + @Override + RetryPolicyFactory createRetryPolicy(final AzureStorageSettings azureStorageSettings) { + return new RetryExponentialRetry(1, 100, 500, azureStorageSettings.getMaxRetries()); + } + + @Override + BlobRequestOptions getBlobRequestOptionsForWriteBlob() { + BlobRequestOptions options = new BlobRequestOptions(); + options.setSingleBlobPutThresholdInBytes(Math.toIntExact(ByteSizeUnit.MB.toBytes(1))); + return options; + } + }; + + final RepositoryMetaData repositoryMetaData = new RepositoryMetaData("repository", AzureRepository.TYPE, + Settings.builder() + .put(CONTAINER_SETTING.getKey(), "container") + .put(ACCOUNT_SETTING.getKey(), clientName) + .build()); + + return new AzureBlobContainer(BlobPath.cleanPath(), new AzureBlobStore(repositoryMetaData, service, threadPool), threadPool); + } + + public void testReadNonexistentBlobThrowsNoSuchFileException() { + final BlobContainer blobContainer = createBlobContainer(between(1, 5)); + final Exception exception = expectThrows(NoSuchFileException.class, () -> blobContainer.readBlob("read_nonexistent_blob")); + assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("not found")); + } + + public void testReadBlobWithRetries() throws Exception { + final int maxRetries = randomIntBetween(1, 5); + final CountDown countDownHead = new CountDown(maxRetries); + final CountDown countDownGet = new CountDown(maxRetries); + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/container/read_blob_max_retries", exchange -> { + Streams.readFully(exchange.getRequestBody()); + if 
("HEAD".equals(exchange.getRequestMethod())) { + if (countDownHead.countDown()) { + exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); + exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(bytes.length)); + exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); + exchange.close(); + return; + } + } else if ("GET".equals(exchange.getRequestMethod())) { + if (countDownGet.countDown()) { + final int rangeStart = getRangeStart(exchange); + assertThat(rangeStart, lessThan(bytes.length)); + final int length = bytes.length - rangeStart; + exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); + exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(length)); + exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length); + exchange.getResponseBody().write(bytes, rangeStart, length); + exchange.close(); + return; + } + } + if (randomBoolean()) { + TestUtils.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.SERVICE_UNAVAILABLE)); + } + exchange.close(); + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries); + try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) { + assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); + assertThat(countDownHead.isCountedDown(), is(true)); + assertThat(countDownGet.isCountedDown(), is(true)); + } + } + + public void testWriteBlobWithRetries() throws Exception { + final int maxRetries = randomIntBetween(1, 5); + final CountDown countDown = new CountDown(maxRetries); + + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/container/write_blob_max_retries", exchange -> { + if ("PUT".equals(exchange.getRequestMethod())) { + if (countDown.countDown()) { + final BytesReference body = Streams.readFully(exchange.getRequestBody()); + if (Objects.deepEquals(bytes, BytesReference.toBytes(body))) { + exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); + } else { + TestUtils.sendError(exchange, RestStatus.BAD_REQUEST); + } + exchange.close(); + return; + } + + if (randomBoolean()) { + if (randomBoolean()) { + Streams.readFully(exchange.getRequestBody(), new byte[randomIntBetween(1, Math.max(1, bytes.length - 1))]); + } else { + Streams.readFully(exchange.getRequestBody()); + TestUtils.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.SERVICE_UNAVAILABLE)); + } + } + exchange.close(); + } + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries); + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { + blobContainer.writeBlob("write_blob_max_retries", stream, bytes.length, false); + } + assertThat(countDown.isCountedDown(), is(true)); + } + + public void testWriteLargeBlob() throws Exception { + final int maxRetries = randomIntBetween(1, 5); + + final int nbBlocks = randomIntBetween(1, 2); + final byte[] data = randomBytes(Constants.DEFAULT_STREAM_WRITE_IN_BYTES * nbBlocks); + + final int nbErrors = 2; // we want all requests to fail at least once + final AtomicInteger countDownUploads = new AtomicInteger(nbErrors * nbBlocks); + final CountDown countDownComplete = new CountDown(nbErrors); + + final Map blocks = new ConcurrentHashMap<>(); + httpServer.createContext("/container/write_large_blob", exchange 
+ httpServer.createContext("/container/write_large_blob", exchange -> { + + if ("PUT".equals(exchange.getRequestMethod())) { + final Map<String, String> params = new HashMap<>(); + RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); + + final String blockId = params.get("blockid"); + if (Strings.hasText(blockId) && (countDownUploads.decrementAndGet() % 2 == 0)) { + blocks.put(blockId, Streams.readFully(exchange.getRequestBody())); + exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); + exchange.close(); + return; + } + + final String complete = params.get("comp"); + if ("blocklist".equals(complete) && (countDownComplete.countDown())) { + final String blockList = Streams.copyToString(new InputStreamReader(exchange.getRequestBody(), UTF_8)); + final List<String> blockUids = Arrays.stream(blockList.split("<Latest>")) + .filter(line -> line.contains("</Latest>")) + .map(line -> line.substring(0, line.indexOf("</Latest>"))) + .collect(Collectors.toList()); + + final ByteArrayOutputStream blob = new ByteArrayOutputStream(); + for (String blockUid : blockUids) { + BytesReference block = blocks.remove(blockUid); + assert block != null; + block.writeTo(blob); + } + assertArrayEquals(data, blob.toByteArray()); + exchange.sendResponseHeaders(RestStatus.CREATED.getStatus(), -1); + exchange.close(); + return; + } + } + + if (randomBoolean()) { + Streams.readFully(exchange.getRequestBody()); + TestUtils.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.SERVICE_UNAVAILABLE)); + } + exchange.close(); + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries); + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", data), data.length)) { + blobContainer.writeBlob("write_large_blob", stream, data.length * nbBlocks, false); + } + assertThat(countDownUploads.get(), equalTo(0)); + assertThat(countDownComplete.isCountedDown(), is(true)); + assertThat(blocks.isEmpty(), is(true)); + } + + private static byte[] randomBlobContent() { + return randomByteArrayOfLength(randomIntBetween(1, frequently() ? 512 : 1 << 20)); // rarely up to 1mb + }
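For blobs above the single-put threshold the Azure SDK switches to block uploads: each chunk goes up as a Put Block request carrying a `blockid` query parameter, and a final Put Block List request (`comp=blocklist`) commits the blocks by listing their ids in `<Latest>` elements. The handler above scrapes those ids back out with plain string splitting; the same scraping in isolation (payload shape per the Azure REST docs, abbreviated):

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class BlockListParsingExample {
        public static void main(String[] args) {
            String blockList = "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
                + "<BlockList><Latest>id1</Latest><Latest>id2</Latest></BlockList>";
            List<String> blockIds = Arrays.stream(blockList.split("<Latest>"))
                .filter(part -> part.contains("</Latest>"))
                .map(part -> part.substring(0, part.indexOf("</Latest>")))
                .collect(Collectors.toList());
            System.out.println(blockIds); // prints [id1, id2]
        }
    }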
+ + private static int getRangeStart(final HttpExchange exchange) { + final String rangeHeader = exchange.getRequestHeaders().getFirst("X-ms-range"); + if (rangeHeader == null) { + return 0; + } + + final Matcher matcher = Pattern.compile("^bytes=([0-9]+)-([0-9]+)$").matcher(rangeHeader); + assertTrue(rangeHeader + " matches expected pattern", matcher.matches()); + return Math.toIntExact(Long.parseLong(matcher.group(1))); + } +} diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 2f12b1c61ffb..28993bd475a0 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -171,7 +171,7 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg } else if (Regex.simpleMatch("HEAD /container/*", request)) { final BytesReference blob = blobs.get(exchange.getRequestURI().getPath()); if (blob == null) { - exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); + TestUtils.sendError(exchange, RestStatus.NOT_FOUND); return; } exchange.getResponseHeaders().add("x-ms-blob-content-length", String.valueOf(blob.length())); @@ -181,7 +181,7 @@ } else if (Regex.simpleMatch("GET /container/*", request)) { final BytesReference blob = blobs.get(exchange.getRequestURI().getPath()); if (blob == null) { - exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); + TestUtils.sendError(exchange, RestStatus.NOT_FOUND); return; } @@ -228,7 +228,7 @@ exchange.getResponseBody().write(response); } else { - exchange.sendResponseHeaders(RestStatus.BAD_REQUEST.getStatus(), -1); + TestUtils.sendError(exchange, RestStatus.BAD_REQUEST); } } finally { exchange.close(); @@ -249,6 +249,13 @@ super(delegate, maxErrorsPerRequest); } + @Override + protected void handleAsError(final HttpExchange exchange) throws IOException { + Streams.readFully(exchange.getRequestBody()); + TestUtils.sendError(exchange, randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.SERVICE_UNAVAILABLE)); + exchange.close(); + } + @Override protected String requestUniqueId(final HttpExchange exchange) { final String requestId = exchange.getRequestHeaders().getFirst(Constants.HeaderConstants.CLIENT_REQUEST_ID_HEADER); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/TestUtils.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/TestUtils.java new file mode 100644 index 000000000000..cdb64ecbcf58 --- /dev/null +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/TestUtils.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.azure; + +import com.microsoft.azure.storage.Constants; +import com.microsoft.azure.storage.StorageErrorCodeStrings; +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpExchange; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +final class TestUtils { + + private TestUtils() {} + + @SuppressForbidden(reason = "use HttpExchange and Headers") + static void sendError(final HttpExchange exchange, final RestStatus status) throws IOException { + final Headers headers = exchange.getResponseHeaders(); + headers.add("Content-Type", "application/xml"); + + final String requestId = exchange.getRequestHeaders().getFirst(Constants.HeaderConstants.CLIENT_REQUEST_ID_HEADER); + if (requestId != null) { + headers.add(Constants.HeaderConstants.REQUEST_ID_HEADER, requestId); + } + + final String errorCode = toAzureErrorCode(status); + if (errorCode != null) { + headers.add(Constants.HeaderConstants.ERROR_CODE, errorCode); + } + + if (errorCode == null || "HEAD".equals(exchange.getRequestMethod())) { + exchange.sendResponseHeaders(status.getStatus(), -1L); + } else { + final byte[] response = ("<?xml version=\"1.0\" encoding=\"utf-8\"?><Error><Code>" + errorCode + "</Code><Message>" + status + "</Message></Error>").getBytes(StandardCharsets.UTF_8); + exchange.sendResponseHeaders(status.getStatus(), response.length); + exchange.getResponseBody().write(response); + } + } + + // See https://docs.microsoft.com/en-us/rest/api/storageservices/common-rest-api-error-codes + private static String toAzureErrorCode(final RestStatus status) { + assert status.getStatus() >= 400; + switch (status) { + case BAD_REQUEST: + return StorageErrorCodeStrings.INVALID_METADATA; + case NOT_FOUND: + return StorageErrorCodeStrings.BLOB_NOT_FOUND; + case INTERNAL_SERVER_ERROR: + return StorageErrorCodeStrings.INTERNAL_ERROR; + default: + return null; + } + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java new file mode 100644 index 000000000000..714ea968ff08 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java @@ -0,0 +1,433 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied.
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.repositories.gcs; + +import com.google.api.gax.retrying.RetrySettings; +import com.google.cloud.http.HttpTransportOptions; +import com.google.cloud.storage.StorageException; +import com.google.cloud.storage.StorageOptions; +import com.sun.net.httpserver.HttpContext; +import com.sun.net.httpserver.HttpServer; +import org.apache.http.HttpStatus; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; +import org.elasticsearch.common.lucene.store.InputStreamIndexInput; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.mocksocket.MockHttpServer; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; +import org.threeten.bp.Duration; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.SocketTimeoutException; +import java.nio.file.NoSuchFileException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.TOKEN_URI_SETTING; +import static org.elasticsearch.repositories.gcs.TestUtils.createServiceAccount; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +@SuppressForbidden(reason = "use a http server") +public class GoogleCloudStorageBlobContainerRetriesTests extends ESTestCase { + + private HttpServer httpServer; + + private String httpServerUrl() { + assertThat(httpServer, notNullValue()); + InetSocketAddress address = httpServer.getAddress(); + return "http://" + 
InetAddresses.toUriString(address.getAddress()) + ":" + address.getPort(); + } + + @Before + public void setUp() throws Exception { + httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + httpServer.start(); + super.setUp(); + } + + @After + public void tearDown() throws Exception { + httpServer.stop(0); + super.tearDown(); + } + + private BlobContainer createBlobContainer(final int maxRetries, final @Nullable TimeValue readTimeout) { + final Settings.Builder clientSettings = Settings.builder(); + final String client = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + clientSettings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(client).getKey(), httpServerUrl()); + clientSettings.put(TOKEN_URI_SETTING.getConcreteSettingForNamespace(client).getKey(), httpServerUrl() + "/token"); + if (readTimeout != null) { + clientSettings.put(READ_TIMEOUT_SETTING.getConcreteSettingForNamespace(client).getKey(), readTimeout); + } + + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(client).getKey(), createServiceAccount(random())); + clientSettings.setSecureSettings(secureSettings); + + final GoogleCloudStorageService service = new GoogleCloudStorageService() { + @Override + StorageOptions createStorageOptions(final GoogleCloudStorageClientSettings clientSettings, + final HttpTransportOptions httpTransportOptions) { + StorageOptions options = super.createStorageOptions(clientSettings, httpTransportOptions); + return options.toBuilder() + .setRetrySettings(RetrySettings.newBuilder() + .setTotalTimeout(options.getRetrySettings().getTotalTimeout()) + .setInitialRetryDelay(Duration.ofMillis(10L)) + .setRetryDelayMultiplier(options.getRetrySettings().getRetryDelayMultiplier()) + .setMaxRetryDelay(Duration.ofSeconds(1L)) + .setMaxAttempts(maxRetries) + .setJittered(false) + .setInitialRpcTimeout(options.getRetrySettings().getInitialRpcTimeout()) + .setRpcTimeoutMultiplier(options.getRetrySettings().getRpcTimeoutMultiplier()) + .setMaxRpcTimeout(options.getRetrySettings().getMaxRpcTimeout()) + .build()) + .build(); + } + }; + service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(clientSettings.build())); + + final List<HttpContext> httpContexts = Arrays.asList( + // Auth + httpServer.createContext("/token", exchange -> { + byte[] response = ("{\"access_token\":\"foo\",\"token_type\":\"Bearer\",\"expires_in\":3600}").getBytes(UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/json"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, response.length); + exchange.getResponseBody().write(response); + exchange.close(); + }), + // Does bucket exists?
+ httpServer.createContext("/storage/v1/b/bucket", exchange -> { + byte[] response = ("{\"kind\":\"storage#bucket\",\"name\":\"bucket\",\"id\":\"0\"}").getBytes(UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/json; charset=utf-8"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, response.length); + exchange.getResponseBody().write(response); + exchange.close(); + }) + ); + + final GoogleCloudStorageBlobStore blobStore = new GoogleCloudStorageBlobStore("bucket", client, service); + httpContexts.forEach(httpContext -> httpServer.removeContext(httpContext)); + + return new GoogleCloudStorageBlobContainer(BlobPath.cleanPath(), blobStore); + } + + public void testReadNonexistentBlobThrowsNoSuchFileException() { + final BlobContainer blobContainer = createBlobContainer(between(1, 5), null); + final Exception exception = expectThrows(NoSuchFileException.class, + () -> Streams.readFully(blobContainer.readBlob("read_nonexistent_blob"))); + assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("blob [read_nonexistent_blob] does not exist")); + } + + public void testReadBlobWithRetries() throws Exception { + final int maxRetries = randomIntBetween(2, 10); + final CountDown countDown = new CountDown(maxRetries); + + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/download/storage/v1/b/bucket/o/read_blob_max_retries", exchange -> { + Streams.readFully(exchange.getRequestBody()); + if (countDown.countDown()) { + exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), bytes.length); + exchange.getResponseBody().write(bytes); + exchange.close(); + return; + } + exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1); + if (randomBoolean()) { + exchange.close(); + } + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries, TimeValue.timeValueMillis(between(100, 500))); + try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) { + assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); + assertThat(countDown.isCountedDown(), is(true)); + } + } + + public void testReadBlobWithReadTimeouts() { + final int maxRetries = randomIntBetween(1, 3); + final BlobContainer blobContainer = createBlobContainer(maxRetries, TimeValue.timeValueMillis(between(100, 200))); + + // HTTP server does not send a response + httpServer.createContext("/download/storage/v1/b/bucket/o/read_blob_unresponsive", exchange -> {}); + + StorageException storageException = expectThrows(StorageException.class, + () -> Streams.readFully(blobContainer.readBlob("read_blob_unresponsive"))); + assertThat(storageException.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); + assertThat(storageException.getCause(), instanceOf(SocketTimeoutException.class)); + + // HTTP server sends a partial response + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/download/storage/v1/b/bucket/o/read_blob_incomplete", exchange -> { + exchange.getResponseHeaders().add("Content-Type", "text/plain; charset=utf-8"); + exchange.sendResponseHeaders(HttpStatus.SC_OK, bytes.length); + final int bytesToSend = randomIntBetween(0, bytes.length - 1); + if (bytesToSend > 0) { + exchange.getResponseBody().write(bytes, 0, bytesToSend); + } + if (randomBoolean()) { + exchange.getResponseBody().flush(); + } + }); + + storageException = expectThrows(StorageException.class, () -> { + try (InputStream stream = 
blobContainer.readBlob("read_blob_incomplete")) { + Streams.readFully(stream); + } + }); + assertThat(storageException.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); + assertThat(storageException.getCause(), instanceOf(SocketTimeoutException.class)); + } + + public void testWriteBlobWithRetries() throws Exception { + final int maxRetries = randomIntBetween(2, 10); + final CountDown countDown = new CountDown(maxRetries); + + final byte[] bytes = randomBlobContent(); + httpServer.createContext("/upload/storage/v1/b/bucket/o", exchange -> { + assertThat(exchange.getRequestURI().getQuery(), containsString("uploadType=multipart")); + if (countDown.countDown()) { + Optional<Tuple<String, BytesArray>> content = TestUtils.parseMultipartRequestBody(exchange.getRequestBody()); + assertThat(content.isPresent(), is(true)); + assertThat(content.get().v1(), equalTo("write_blob_max_retries")); + if (Objects.deepEquals(bytes, content.get().v2().array())) { + byte[] response = ("{\"bucket\":\"bucket\",\"name\":\"" + content.get().v1() + "\"}").getBytes(UTF_8); + exchange.getResponseHeaders().add("Content-Type", "application/json"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + exchange.getResponseBody().write(response); + } else { + exchange.sendResponseHeaders(HttpStatus.SC_BAD_REQUEST, -1); + } + exchange.close(); + return; + } + if (randomBoolean()) { + if (randomBoolean()) { + Streams.readFully(exchange.getRequestBody(), new byte[randomIntBetween(1, Math.max(1, bytes.length - 1))]); + } else { + Streams.readFully(exchange.getRequestBody()); + exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1); + } + } + exchange.close(); + }); + + final BlobContainer blobContainer = createBlobContainer(maxRetries, null); + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { + blobContainer.writeBlob("write_blob_max_retries", stream, bytes.length, false); + } + assertThat(countDown.isCountedDown(), is(true)); + } + + public void testWriteBlobWithReadTimeouts() { + final byte[] bytes = randomByteArrayOfLength(randomIntBetween(10, 128)); + final TimeValue readTimeout = TimeValue.timeValueMillis(randomIntBetween(100, 500)); + final BlobContainer blobContainer = createBlobContainer(1, readTimeout); + + // HTTP server does not send a response + httpServer.createContext("/upload/storage/v1/b/bucket/o", exchange -> { + if (randomBoolean()) { + if (randomBoolean()) { + Streams.readFully(exchange.getRequestBody(), new byte[randomIntBetween(1, bytes.length - 1)]); + } else { + Streams.readFully(exchange.getRequestBody()); + } + } + }); + + Exception exception = expectThrows(StorageException.class, () -> { + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { + blobContainer.writeBlob("write_blob_timeout", stream, bytes.length, false); + } + }); + assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); + + assertThat(exception.getCause(), instanceOf(SocketTimeoutException.class)); + assertThat(exception.getCause().getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); + } + + public void testWriteLargeBlob() throws IOException { + // See {@link BaseWriteChannel#DEFAULT_CHUNK_SIZE} + final int defaultChunkSize = 8 * 256 * 1024; + final int nbChunks = randomIntBetween(3, 5); + final int lastChunkSize = randomIntBetween(1, defaultChunkSize - 1); + final int totalChunks = nbChunks + 1; + final byte[] data = randomBytes(defaultChunkSize * nbChunks + lastChunkSize);
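The payload is sized as `nbChunks` full chunks of the client's default chunk size plus one short trailing chunk, so every `Content-Range` the handler sees is predictable. A small sketch of that arithmetic (illustrative values; intermediate chunks report an unknown total as `/*`, the final one carries the real total):

    public class ChunkLayoutSketch {
        public static void main(String[] args) {
            final int defaultChunkSize = 8 * 256 * 1024;   // mirrors BaseWriteChannel#DEFAULT_CHUNK_SIZE
            final int nbChunks = 3;
            final int lastChunkSize = 123_456;
            final int total = defaultChunkSize * nbChunks + lastChunkSize;
            for (int offset = 0; offset < total; offset += defaultChunkSize) {
                final int end = Math.min(offset + defaultChunkSize, total) - 1;
                final boolean last = end == total - 1;
                System.out.println("Content-Range: bytes " + offset + "-" + end + (last ? "/" + total : "/*"));
            }
        }
    }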
    public void testWriteLargeBlob() throws IOException {
        // See {@link BaseWriteChannel#DEFAULT_CHUNK_SIZE}
        final int defaultChunkSize = 8 * 256 * 1024;
        final int nbChunks = randomIntBetween(3, 5);
        final int lastChunkSize = randomIntBetween(1, defaultChunkSize - 1);
        final int totalChunks = nbChunks + 1;
        final byte[] data = randomBytes(defaultChunkSize * nbChunks + lastChunkSize);
        assertThat(data.length, greaterThan(GoogleCloudStorageBlobStore.LARGE_BLOB_THRESHOLD_BYTE_SIZE));

        logger.debug("resumable upload is composed of [{}] total chunks ([{}] chunks of length [{}] and last chunk of length [{}])",
            totalChunks, nbChunks, defaultChunkSize, lastChunkSize);

        final int nbErrors = 2; // we want all requests to fail at least once
        final AtomicInteger countInits = new AtomicInteger(nbErrors);
        final AtomicInteger countUploads = new AtomicInteger(nbErrors * totalChunks);
        final AtomicBoolean allow410Gone = new AtomicBoolean(randomBoolean());
        final AtomicBoolean allowReadTimeout = new AtomicBoolean(rarely());
        final int wrongChunk = randomIntBetween(1, totalChunks);

        final AtomicReference<String> sessionUploadId = new AtomicReference<>(UUIDs.randomBase64UUID());
        logger.debug("starting with resumable upload id [{}]", sessionUploadId.get());

        httpServer.createContext("/upload/storage/v1/b/bucket/o", exchange -> {
            final Map<String, String> params = new HashMap<>();
            RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params);
            assertThat(params.get("uploadType"), equalTo("resumable"));

            if ("POST".equals(exchange.getRequestMethod())) {
                assertThat(params.get("name"), equalTo("write_large_blob"));
                if (countInits.decrementAndGet() <= 0) {
                    byte[] response = Streams.readFully(exchange.getRequestBody()).utf8ToString().getBytes(UTF_8);
                    exchange.getResponseHeaders().add("Content-Type", "application/json");
                    exchange.getResponseHeaders().add("Location", httpServerUrl() +
                        "/upload/storage/v1/b/bucket/o?uploadType=resumable&upload_id=" + sessionUploadId.get());
                    exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length);
                    exchange.getResponseBody().write(response);
                    exchange.close();
                    return;
                }
                if (allowReadTimeout.get()) {
                    assertThat(wrongChunk, greaterThan(0));
                    return;
                }

            } else if ("PUT".equals(exchange.getRequestMethod())) {
                final String uploadId = params.get("upload_id");
                if (uploadId.equals(sessionUploadId.get()) == false) {
                    logger.debug("session id [{}] is gone", uploadId);
                    assertThat(wrongChunk, greaterThan(0));
                    Streams.readFully(exchange.getRequestBody());
                    exchange.sendResponseHeaders(HttpStatus.SC_GONE, -1);
                    exchange.close();
                    return;
                }

                if (countUploads.get() == (wrongChunk * nbErrors)) {
                    if (allowReadTimeout.compareAndSet(true, false)) {
                        assertThat(wrongChunk, greaterThan(0));
                        return;
                    }
                    if (allow410Gone.compareAndSet(true, false)) {
                        final String newUploadId = UUIDs.randomBase64UUID(random());
                        logger.debug("chunk [{}] gone, updating session ids [{} -> {}]", wrongChunk, sessionUploadId.get(), newUploadId);
                        sessionUploadId.set(newUploadId);

                        // we must reset the counters because the whole object upload will be retried
                        countInits.set(nbErrors);
                        countUploads.set(nbErrors * totalChunks);

                        Streams.readFully(exchange.getRequestBody());
                        exchange.sendResponseHeaders(HttpStatus.SC_GONE, -1);
                        exchange.close();
                        return;
                    }
                }

                final String range = exchange.getRequestHeaders().getFirst("Content-Range");
                assertTrue(Strings.hasLength(range));

                if (countUploads.decrementAndGet() % 2 == 0) {
                    final ByteArrayOutputStream requestBody = new ByteArrayOutputStream();
                    final long bytesRead = Streams.copy(exchange.getRequestBody(), requestBody);
                    assertThat(Math.toIntExact(bytesRead), anyOf(equalTo(defaultChunkSize), equalTo(lastChunkSize)));

                    final int rangeStart = TestUtils.getContentRangeStart(range);
                    final
int rangeEnd = TestUtils.getContentRangeEnd(range); + assertThat(rangeEnd + 1 - rangeStart, equalTo(Math.toIntExact(bytesRead))); + assertArrayEquals(Arrays.copyOfRange(data, rangeStart, rangeEnd + 1), requestBody.toByteArray()); + + final Integer limit = TestUtils.getContentRangeLimit(range); + if (limit != null) { + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); + exchange.close(); + return; + } else { + exchange.getResponseHeaders().add("Range", String.format(Locale.ROOT, "bytes=%d/%d", rangeStart, rangeEnd)); + exchange.getResponseHeaders().add("Content-Length", "0"); + exchange.sendResponseHeaders(308 /* Resume Incomplete */, -1); + exchange.close(); + return; + } + } + } + + // read all the request body, otherwise the SDK client throws a non-retryable StorageException + Streams.readFully(exchange.getRequestBody()); + if (randomBoolean()) { + exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1); + } + exchange.close(); + }); + + final TimeValue readTimeout = allowReadTimeout.get() ? TimeValue.timeValueSeconds(3) : null; + + final BlobContainer blobContainer = createBlobContainer(nbErrors + 1, readTimeout); + try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", data), data.length)) { + blobContainer.writeBlob("write_large_blob", stream, data.length, false); + } + + assertThat(countInits.get(), equalTo(0)); + assertThat(countUploads.get(), equalTo(0)); + assertThat(allow410Gone.get(), is(false)); + } + + private static byte[] randomBlobContent() { + return randomByteArrayOfLength(randomIntBetween(1, frequently() ? 512 : 1 << 20)); // rarely up to 1mb + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 7c855becee40..ae528608bf36 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.MockSecureSettings; @@ -38,8 +39,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.Repository; @@ -53,9 +52,6 @@ import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.net.URLDecoder; -import java.security.KeyPairGenerator; -import java.util.Arrays; -import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -63,13 +59,12 @@ import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.UUID; +import 
java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import java.util.zip.GZIPInputStream; import static java.nio.charset.StandardCharsets.UTF_8; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; @@ -77,6 +72,7 @@ import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSetting import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.TOKEN_URI_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageRepository.BUCKET; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageRepository.CLIENT_NAME; +import static org.hamcrest.Matchers.lessThanOrEqualTo; @SuppressForbidden(reason = "this test uses a HttpServer to emulate a Google Cloud Storage endpoint") public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryIntegTestCase { @@ -118,7 +114,7 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRe @Override protected Settings nodeSettings(int nodeOrdinal) { if (serviceAccount == null) { - serviceAccount = createServiceAccount(); + serviceAccount = TestUtils.createServiceAccount(random()); } final Settings.Builder settings = Settings.builder(); @@ -217,31 +213,6 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRe } } - private static byte[] createServiceAccount() { - try { - final KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA"); - keyPairGenerator.initialize(1024); - final String privateKey = Base64.getEncoder().encodeToString(keyPairGenerator.generateKeyPair().getPrivate().getEncoded()); - - final ByteArrayOutputStream out = new ByteArrayOutputStream(); - try (XContentBuilder builder = new XContentBuilder(XContentType.JSON.xContent(), out)) { - builder.startObject(); - { - builder.field("type", "service_account"); - builder.field("project_id", getTestClass().getName().toLowerCase(Locale.ROOT)); - builder.field("private_key_id", UUID.randomUUID().toString()); - builder.field("private_key", "-----BEGIN PRIVATE KEY-----\n" + privateKey + "\n-----END PRIVATE KEY-----\n"); - builder.field("client_email", "elastic@appspot.gserviceaccount.com"); - builder.field("client_id", String.valueOf(randomNonNegativeLong())); - } - builder.endObject(); - } - return out.toByteArray(); - } catch (Exception e) { - throw new AssertionError("Unable to create service account file", e); - } - } - /** * Minimal HTTP handler that acts as a Google Cloud Storage compliant server */ @@ -338,65 +309,16 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESMockAPIBasedRe exchange.getResponseBody().write(response); } else if (Regex.simpleMatch("POST /upload/storage/v1/b/bucket/*uploadType=multipart*", request)) { - try (BufferedInputStream in = new BufferedInputStream(new GZIPInputStream(exchange.getRequestBody()))) { - byte[] response = new byte[0]; - String blob = null; - int read; - while ((read = in.read()) != -1) { - boolean markAndContinue = false; - try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - do { // search next consecutive {carriage return, new line} chars and stop - if ((char) read == '\r') { - int next = in.read(); - if (next != -1) { - if (next == '\n') { - break; - } - out.write(read); - out.write(next); - continue; - } - } - out.write(read); - } while ((read = in.read()) != -1); + 
Optional<Tuple<String, BytesArray>> content = TestUtils.parseMultipartRequestBody(exchange.getRequestBody());
+                if (content.isPresent()) {
+                    blobs.put(content.get().v1(), content.get().v2());

-                    final String line = new String(out.toByteArray(), UTF_8);
-                    if (line.length() == 0 || line.equals("\r\n") || line.startsWith("--")
-                        || line.toLowerCase(Locale.ROOT).startsWith("content")) {
-                        markAndContinue = true;
-                    } else if (line.startsWith("{\"bucket\":\"bucket\"")) {
-                        markAndContinue = true;
-                        Matcher matcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line);
-                        if (matcher.find()) {
-                            blob = matcher.group(1);
-                            response = line.getBytes(UTF_8);
-                        }
-                    }
-                    if (markAndContinue) {
-                        in.mark(Integer.MAX_VALUE);
-                        continue;
-                    }
-                }
-                if (blob != null) {
-                    in.reset();
-                    try (ByteArrayOutputStream binary = new ByteArrayOutputStream()) {
-                        while ((read = in.read()) != -1) {
-                            binary.write(read);
-                        }
-                        binary.flush();
-                        byte[] tmp = binary.toByteArray();
-                        // removes the trailing end "\r\n--__END_OF_PART__--\r\n" which is 23 bytes long
-                        blobs.put(blob, new BytesArray(Arrays.copyOf(tmp, tmp.length - 23)));
-
-                        exchange.getResponseHeaders().add("Content-Type", "application/json");
-                        exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length);
-                        exchange.getResponseBody().write(response);
-
-                    } finally {
-                        blob = null;
-                    }
-                }
-            }
+                    byte[] response = ("{\"bucket\":\"bucket\",\"name\":\"" + content.get().v1() + "\"}").getBytes(UTF_8);
+                    exchange.getResponseHeaders().add("Content-Type", "application/json");
+                    exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length);
+                    exchange.getResponseBody().write(response);
+                } else {
+                    exchange.sendResponseHeaders(RestStatus.BAD_REQUEST.getStatus(), -1);
                }

            } else if (Regex.simpleMatch("POST /upload/storage/v1/b/bucket/*uploadType=resumable*", request)) {
@@ -419,41 +341,31 @@
                RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params);

                final String blobName = params.get("test_blob_name");
+                byte[] blob = blobs.get(blobName).array();
+                assertNotNull(blob);
+
                final String range = exchange.getRequestHeaders().getFirst("Content-Range");
-                assert Strings.hasLength(range);
+                final Integer limit = TestUtils.getContentRangeLimit(range);
+                final int start = TestUtils.getContentRangeStart(range);
+                final int end = TestUtils.getContentRangeEnd(range);

-                Matcher matcher = Pattern.compile("bytes ([^/]*)/([0-9\\*]*)").matcher(range);
-                if (matcher.find()) {
-                    String bytes = matcher.group(1);
-                    String limit = matcher.group(2);
-                    byte[] blob = blobs.get(blobName).array();
-                    assert blob != null;
-                    // client is uploading a chunk
-                    matcher = Pattern.compile("([0-9]*)-([0-9]*)").matcher(bytes);
-                    assert matcher.find();
+                final ByteArrayOutputStream out = new ByteArrayOutputStream();
+                long bytesRead = Streams.copy(exchange.getRequestBody(), out);
+                int length = Math.max(end + 1, limit != null ? limit : 0);
+                assertThat((int) bytesRead, lessThanOrEqualTo(length));
+                if (length > blob.length) {
+                    blob = ArrayUtil.growExact(blob, length);
+                }
+                System.arraycopy(out.toByteArray(), 0, blob, start, Math.toIntExact(bytesRead));
+                blobs.put(blobName, new BytesArray(blob));

-                    int end = Integer.parseInt(matcher.group(2));
-                    int start = Integer.parseInt(matcher.group(1));
-
-                    final ByteArrayOutputStream out = new ByteArrayOutputStream();
-                    long count = Streams.copy(exchange.getRequestBody(), out);
-                    int length = Math.max(end + 1, "*".equals(limit) ?
0 : Integer.parseInt(limit)); - assert count <= length; - if (length > blob.length) { - blob = ArrayUtil.growExact(blob, length); - } - assert blob.length >= end; - System.arraycopy(out.toByteArray(), 0, blob, start, Math.toIntExact(count)); - blobs.put(blobName, new BytesArray(blob)); - - if ("*".equals(limit)) { - exchange.getResponseHeaders().add("Range", String.format(Locale.ROOT, "bytes=%d/%d", start, end)); - exchange.getResponseHeaders().add("Content-Length", "0"); - exchange.sendResponseHeaders(308 /* Resume Incomplete */, -1); - } else { - assert blob.length == Integer.parseInt(limit); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); - } + if (limit == null) { + exchange.getResponseHeaders().add("Range", String.format(Locale.ROOT, "bytes=%d/%d", start, end)); + exchange.getResponseHeaders().add("Content-Length", "0"); + exchange.sendResponseHeaders(308 /* Resume Incomplete */, -1); + } else { + assertThat(limit, lessThanOrEqualTo(blob.length)); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); } } else { exchange.sendResponseHeaders(RestStatus.INTERNAL_SERVER_ERROR.getStatus(), -1); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/TestUtils.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/TestUtils.java new file mode 100644 index 000000000000..a6ae0578fbd4 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/TestUtils.java @@ -0,0 +1,158 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.repositories.gcs;
+
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentType;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.KeyPairGenerator;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.Locale;
+import java.util.Optional;
+import java.util.Random;
+import java.util.UUID;
+import java.util.function.BiFunction;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.zip.GZIPInputStream;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+final class TestUtils {
+
+    private TestUtils() {}
+
+    /**
+     * Creates a random Service Account file for testing purposes
+     */
+    static byte[] createServiceAccount(final Random random) {
+        try {
+            final KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA");
+            keyPairGenerator.initialize(1024);
+            final String privateKey = Base64.getEncoder().encodeToString(keyPairGenerator.generateKeyPair().getPrivate().getEncoded());
+
+            final ByteArrayOutputStream out = new ByteArrayOutputStream();
+            try (XContentBuilder builder = new XContentBuilder(XContentType.JSON.xContent(), out)) {
+                builder.startObject();
+                {
+                    builder.field("type", "service_account");
+                    builder.field("project_id", "test");
+                    builder.field("private_key_id", UUID.randomUUID().toString());
+                    builder.field("private_key", "-----BEGIN PRIVATE KEY-----\n" + privateKey + "\n-----END PRIVATE KEY-----\n");
+                    builder.field("client_email", "elastic@appspot.gserviceaccount.com");
+                    builder.field("client_id", String.valueOf(Math.abs(random.nextLong())));
+                }
+                builder.endObject();
+            }
+            return out.toByteArray();
+        } catch (Exception e) {
+            throw new AssertionError("Unable to create service account file", e);
+        }
+    }
+
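+    /**
+     * Parses the GZIP-compressed body of a multipart upload request, returning the
+     * blob name and blob content (minus the trailing multipart boundary) when the
+     * metadata part can be matched.
+     */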
+    static Optional<Tuple<String, BytesArray>> parseMultipartRequestBody(final InputStream requestBody) throws IOException {
+        Tuple<String, BytesArray> content = null;
+        try (BufferedInputStream in = new BufferedInputStream(new GZIPInputStream(requestBody))) {
+            String name = null;
+            int read;
+            while ((read = in.read()) != -1) {
+                boolean markAndContinue = false;
+                try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+                    do { // search next consecutive {carriage return, new line} chars and stop
+                        if ((char) read == '\r') {
+                            int next = in.read();
+                            if (next != -1) {
+                                if (next == '\n') {
+                                    break;
+                                }
+                                out.write(read);
+                                out.write(next);
+                                continue;
+                            }
+                        }
+                        out.write(read);
+                    } while ((read = in.read()) != -1);
+
+                    final String line = new String(out.toByteArray(), UTF_8);
+                    if (line.length() == 0 || line.equals("\r\n") || line.startsWith("--")
+                        || line.toLowerCase(Locale.ROOT).startsWith("content")) {
+                        markAndContinue = true;
+                    } else if (line.startsWith("{\"bucket\":\"bucket\"")) {
+                        markAndContinue = true;
+                        Matcher matcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line);
+                        if (matcher.find()) {
+                            name = matcher.group(1);
+                        }
+                    }
+                    if (markAndContinue) {
+                        in.mark(Integer.MAX_VALUE);
+                        continue;
+                    }
+                }
+                if (name != null) {
+                    in.reset();
+                    try (ByteArrayOutputStream binary = new ByteArrayOutputStream()) {
+                        while ((read = in.read()) != -1) {
+                            binary.write(read);
+                        }
+                        binary.flush();
+                        byte[] tmp = binary.toByteArray();
+                        // removes the trailing end "\r\n--__END_OF_PART__--\r\n" which is 23 bytes long
+                        content = Tuple.tuple(name, new BytesArray(Arrays.copyOf(tmp, tmp.length - 23)));
+                    }
+                }
+            }
+        }
+        return Optional.ofNullable(content);
+    }
+
+    private static final Pattern PATTERN_CONTENT_RANGE = Pattern.compile("bytes ([^/]*)/([0-9\\*]*)");
+    private static final Pattern PATTERN_CONTENT_RANGE_BYTES = Pattern.compile("([0-9]*)-([0-9]*)");
+
+    private static Integer parse(final Pattern pattern, final String contentRange, final BiFunction<String, String, Integer> fn) {
+        final Matcher matcher = pattern.matcher(contentRange);
+        if (matcher.matches() == false || matcher.groupCount() != 2) {
+            throw new IllegalArgumentException("Unable to parse content range header");
+        }
+        return fn.apply(matcher.group(1), matcher.group(2));
+    }
+
+    // e.g. "bytes 0-1023/4096" -> 4096; "bytes 0-1023/*" -> null (total size not yet known)
+    static Integer getContentRangeLimit(final String contentRange) {
+        return parse(PATTERN_CONTENT_RANGE, contentRange, (bytes, limit) -> "*".equals(limit) ? null : Integer.parseInt(limit));
+    }
+
+    static int getContentRangeStart(final String contentRange) {
+        return parse(PATTERN_CONTENT_RANGE, contentRange,
+            (bytes, limit) -> parse(PATTERN_CONTENT_RANGE_BYTES, bytes,
+                (start, end) -> Integer.parseInt(start)));
+    }
+
+    static int getContentRangeEnd(final String contentRange) {
+        return parse(PATTERN_CONTENT_RANGE, contentRange,
+            (bytes, limit) -> parse(PATTERN_CONTENT_RANGE_BYTES, bytes,
+                (start, end) -> Integer.parseInt(end)));
+    }
+}
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index a39d8401cbf0..b14b9c712ff8 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -37,7 +37,7 @@ versions << [
   'hadoop2': '2.8.1'
 ]

-testFixtures.useFixture ":test:fixtures:krb5kdc-fixture"
+testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"

 configurations {
   hdfsFixture
diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle
index dad3aecbc10e..4694f201f123 100644
--- a/plugins/repository-s3/build.gradle
+++ b/plugins/repository-s3/build.gradle
@@ -29,7 +29,7 @@ esplugin {
 }

 versions << [
-  'aws': '1.11.562'
+  'aws': '1.11.636'
 ]

 dependencies {
@@ -139,6 +139,9 @@ task thirdPartyTest(type: Test) {

 if (useFixture) {
   apply plugin: 'elasticsearch.test.fixtures'
+
+  testFixtures.useFixture()
+
   task writeDockerFile {
     File minioDockerfile = new File("${project.buildDir}/minio-docker/Dockerfile")
     outputs.file(minioDockerfile)
diff --git a/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.562.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.562.jar.sha1
deleted file mode 100644
index ed8ded6a3608..000000000000
--- a/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.562.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b5fc47ec1b5afe180f5ebb4eda755acdca7a20ae
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.636.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.636.jar.sha1
new file mode 100644
index 000000000000..b9ee9c102dbc
--- /dev/null
+++ b/plugins/repository-s3/licenses/aws-java-sdk-core-1.11.636.jar.sha1
@@ -0,0 +1 @@
+84c9f180f8f60f6f1433c9c5253fcb704593b121
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.562.jar.sha1
deleted file mode 100644
index 8e852fe9b275..000000000000
--- a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.562.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1712c878f7e9483ceac1eb2356a9457a3c8df03e
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.636.jar.sha1
b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.636.jar.sha1
new file mode 100644
index 000000000000..1e05e98d240d
--- /dev/null
+++ b/plugins/repository-s3/licenses/aws-java-sdk-s3-1.11.636.jar.sha1
@@ -0,0 +1 @@
+f86fc1993ac8122f6f02a8eb9b467b5f945cd76b
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jmespath-java-1.11.562.jar.sha1 b/plugins/repository-s3/licenses/jmespath-java-1.11.562.jar.sha1
deleted file mode 100644
index 8e2d0e1935a3..000000000000
--- a/plugins/repository-s3/licenses/jmespath-java-1.11.562.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1147ed0ad1f2c5a16b8271e38e3cda5cd488c8ae
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/jmespath-java-1.11.636.jar.sha1 b/plugins/repository-s3/licenses/jmespath-java-1.11.636.jar.sha1
new file mode 100644
index 000000000000..70c0d3633af0
--- /dev/null
+++ b/plugins/repository-s3/licenses/jmespath-java-1.11.636.jar.sha1
@@ -0,0 +1 @@
+e468c349ce410171a1d5df7fa0fa377d52c5d651
\ No newline at end of file
diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java
index edc10bdec3a8..00ad1b7e9b23 100644
--- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java
+++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java
@@ -68,7 +68,7 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase {
     /**
      * Simplest case: a module with no controller daemon.
      */
-    public void testNoControllerSpawn() throws IOException, InterruptedException {
+    public void testNoControllerSpawn() throws IOException {
         Path esHome = createTempDir().resolve("esHome");
         Settings.Builder settingsBuilder = Settings.builder();
         settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString());
diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java
index 858a2d606706..15f110126f1c 100644
--- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java
+++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java
@@ -35,7 +35,6 @@ import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.seqno.RetentionLeaseUtils;
 import org.elasticsearch.rest.action.document.RestIndexAction;
 import org.elasticsearch.test.rest.yaml.ObjectPath;
-import org.hamcrest.Matcher;
 import org.hamcrest.Matchers;

 import java.io.IOException;
@@ -180,25 +179,6 @@
         }
     }

-    private void assertDocCountOnAllCopies(String index, int expectedCount) throws Exception {
-        assertBusy(() -> {
-            Map<String, Object> state = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state")));
-            String xpath = "routing_table.indices."
+ index + ".shards.0.node"; - @SuppressWarnings("unchecked") List assignedNodes = (List) XContentMapValues.extractValue(xpath, state); - assertNotNull(state.toString(), assignedNodes); - for (String assignedNode : assignedNodes) { - try { - assertCount(index, "_only_nodes:" + assignedNode, expectedCount); - } catch (ResponseException e) { - if (e.getMessage().contains("no data nodes with criteria [" + assignedNode + "found for shard: [" + index + "][0]")) { - throw new AssertionError(e); // shard is relocating - ask assert busy to retry - } - throw e; - } - } - }); - } - private void assertCount(final String index, final String preference, final int expectedCount) throws IOException { final int actualDocs; try { @@ -614,20 +594,6 @@ public class RecoveryIT extends AbstractRollingTestCase { ensureGlobalCheckpointSynced(index); } - @SuppressWarnings("unchecked") - private void assertPeerRecoveredFiles(String reason, String index, String targetNode, Matcher sizeMatcher) throws IOException { - Map recoveryStats = entityAsMap(client().performRequest(new Request("GET", index + "/_recovery"))); - List> shards = (List>) XContentMapValues.extractValue(index + "." + "shards", recoveryStats); - for (Map shard : shards) { - if (Objects.equals(XContentMapValues.extractValue("type", shard), "PEER")) { - if (Objects.equals(XContentMapValues.extractValue("target.name", shard), targetNode)) { - Integer recoveredFileSize = (Integer) XContentMapValues.extractValue("index.files.recovered", shard); - assertThat(reason + " target node [" + targetNode + "] stats [" + recoveryStats + "]", recoveredFileSize, sizeMatcher); - } - } - } - } - @SuppressWarnings("unchecked") private void ensureGlobalCheckpointSynced(String index) throws Exception { assertBusy(() -> { diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/SearchRestCancellationIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/SearchRestCancellationIT.java index 698f42c43ca0..475176eb31ea 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/SearchRestCancellationIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/SearchRestCancellationIT.java @@ -240,7 +240,7 @@ public class SearchRestCancellationIT extends HttpSmokeTestCase { LogManager.getLogger(SearchRestCancellationIT.class).info("Blocking on the document {}", fieldsLookup.get("_id")); hits.incrementAndGet(); try { - awaitBusy(() -> shouldBlock.get() == false); + waitUntil(() -> shouldBlock.get() == false); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json index 69a1f8fb8ce3..1505db774f0d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json @@ -1,6 +1,8 @@ { - "description": "Parameters that are accepted by all API endpoints.", - "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html", + "documentation" : { + "description": "Parameters that are accepted by all API endpoints.", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html" + }, "params": { "pretty": { "type": "boolean", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json index c7c393a6a1cb..7f630f7666f3 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/explain.json @@ -23,32 +23,6 @@ "description":"The name of the index" } } - }, - { - "path":"/{index}/{type}/{id}/_explain", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The document ID" - }, - "index":{ - "type":"string", - "description":"The name of the index" - }, - "type":{ - "type":"string", - "description":"The type of the document", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json index 449754fcd405..9b210b893275 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json @@ -34,52 +34,6 @@ "description":"A comma-separated list of fields" } } - }, - { - "path":"/_mapping/{type}/field/{fields}", - "methods":[ - "GET" - ], - "parts":{ - "type":{ - "type":"list", - "description":"A comma-separated list of document types", - "deprecated":true - }, - "fields":{ - "type":"list", - "description":"A comma-separated list of fields" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mapping/{type}/field/{fields}", - "methods":[ - "GET" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names" - }, - "type":{ - "type":"list", - "description":"A comma-separated list of document types", - "deprecated":true - }, - "fields":{ - "type":"list", - "description":"A comma-separated list of fields" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json index 313e5cff7d20..17274e8bbd78 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json @@ -24,52 +24,10 @@ "description":"A comma-separated list of index names" } } - }, - { - "path":"/_mapping/{type}", - "methods":[ - "GET" - ], - "parts":{ - "type":{ - "type":"list", - "description":"A comma-separated list of document types", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mapping/{type}", - "methods":[ - "GET" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names" - }, - "type":{ - "type":"list", - "description":"A comma-separated list of document types", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether to add the type name to the response (default: false)" - }, "ignore_unavailable":{ "type":"boolean", "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json 
b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json index 901576e5980b..b2b0741203be 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json @@ -19,155 +19,10 @@ "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." } } - }, - { - "path":"/{index}/{type}/_mapping", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." - }, - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mapping/{type}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." - }, - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/{type}/_mappings", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." - }, - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/{index}/_mappings/{type}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." - }, - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"/_mappings/{type}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } - }, - { - "path":"{index}/_mappings", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices." 
- } - }, - "deprecated":{ - "version":"7.0.0", - "description":"The plural mappings is accepted but only /_mapping is documented" - } - }, - { - "path":"/_mapping/{type}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "type":{ - "type":"string", - "description":"The name of the document type", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be expected in the body of the mappings." - }, "timeout":{ "type":"time", "description":"Explicit operation timeout" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml index 50a5804d887c..cba15e21e554 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yml @@ -32,7 +32,6 @@ setup: - is_true: matched - match: { explanation.value: 1 } - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: id_1 } --- @@ -49,7 +48,6 @@ setup: - is_true: matched - match: { explanation.value: 1 } - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: id_1 } --- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/11_basic_with_types.yml deleted file mode 100644 index 5f211435ae97..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/11_basic_with_types.yml +++ /dev/null @@ -1,66 +0,0 @@ -setup: - - do: - indices.create: - index: test_1 - body: - aliases: - alias_1: - "filter" : { "term" : { "foo" : "bar"} } - - - do: - index: - index: test_1 - type: test - id: id_1 - body: { foo: bar, title: howdy } - - - do: - indices.refresh: {} - ---- -"Basic explain": - - - do: - explain: - index: test_1 - type: test - id: id_1 - body: - query: - match_all: {} - - - is_true: matched - - match: { explanation.value: 1 } - - match: { _index: test_1 } - - match: { _type: test } - - match: { _id: id_1 } - ---- -"Basic explain with alias": - - - do: - explain: - index: alias_1 - type: test - id: id_1 - body: - query: - match_all: {} - - - is_true: matched - - match: { explanation.value: 1 } - - match: { _index: test_1 } - - match: { _type: test } - - match: { _id: id_1 } - ---- -"Explain body without query element": - - do: - catch: bad_request - explain: - index: test_1 - type: test - id: id_1 - body: - match_all: {} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml index ae03a58f13f6..1708034df8d4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/20_source_filtering.yml @@ -14,7 +14,6 @@ - do: explain: { index: test_1, id: 1, _source: false, body: { query: { match_all: {}} } } - match: { _index: test_1 } - - match: { _type: _doc } - match: { _id: "1" } - is_false: get._source diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/21_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/21_source_filtering_with_types.yml deleted file mode 100644 index e13edf7be504..000000000000 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/21_source_filtering_with_types.yml +++ /dev/null @@ -1,44 +0,0 @@ ---- -"Source filtering": - - do: - index: - index: test_1 - type: test - id: 1 - body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - - do: - indices.refresh: - index: test_1 - - - do: - explain: { index: test_1, type: test, id: 1, _source: false, body: { query: { match_all: {}} } } - - match: { _index: test_1 } - - match: { _type: test } - - match: { _id: "1" } - - is_false: get._source - - - do: - explain: { index: test_1, type: test, id: 1, _source: true, body: { query: { match_all: {}} } } - - match: { get._source.include.field1: v1 } - - - do: - explain: { index: test_1, type: test, id: 1, _source: include.field1, body: { query: { match_all: {}} } } - - match: { get._source.include.field1: v1 } - - is_false: get._source.include.field2 - - - do: - explain: { index: test_1, type: test, id: 1, _source_includes: include.field1, body: { query: { match_all: {}} } } - - match: { get._source.include.field1: v1 } - - is_false: get._source.include.field2 - - - do: - explain: { index: test_1, type: test, id: 1, _source_includes: "include.field1,include.field2", body: { query: { match_all: {}} } } - - match: { get._source.include.field1: v1 } - - match: { get._source.include.field2: v2 } - - is_false: get._source.count - - - do: - explain: { index: test_1, type: test, id: 1, _source_includes: include, _source_excludes: "*.field2", body: { query: { match_all: {}} } } - - match: { get._source.include.field1: v1 } - - is_false: get._source.include.field2 - - is_false: get._source.count diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/31_query_string_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/31_query_string_with_types.yml deleted file mode 100644 index b6930688acf2..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/31_query_string_with_types.yml +++ /dev/null @@ -1,71 +0,0 @@ ---- -"explain with query_string parameters": - - do: - indices.create: - include_type_name: true - index: test - body: - mappings: - test: - properties: - number: - type: integer - - - do: - index: - index: test - type: test - id: 1 - body: { field: foo bar} - - - do: - indices.refresh: - index: [test] - - - do: - explain: - index: test - type: test - id: 1 - q: bar - df: field - - - is_true: matched - - - do: - explain: - index: test - type: test - id: 1 - q: field:foo field:xyz - - - is_true: matched - - - do: - explain: - index: test - type: test - id: 1 - q: field:foo field:xyz - default_operator: AND - - - is_false: matched - - - do: - explain: - index: test - type: test - id: 1 - q: field:BA* - - - is_true: matched - - - do: - explain: - index: test - type: test - id: 1 - q: number:foo - lenient: true - - - is_false: matched diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml deleted file mode 100644 index 355f9d859112..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/40_mix_typeless_typeful.yml +++ /dev/null @@ -1,38 +0,0 @@ ---- -"Explain with typeless API on an index that has types": - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - - do: - index: - index: index - type: not_doc - id: 1 
- body: { foo: bar } - - - do: - indices.refresh: {} - - - do: - explain: - index: index - type: _doc #todo: make _explain typeless and remove this - id: 1 - body: - query: - match_all: {} - - - match: { _index: "index" } - - match: { _type: "_doc" } - - match: { _id: "1"} - - is_true: matched - - match: { explanation.value: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml deleted file mode 100644 index e74ffd9cf177..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml +++ /dev/null @@ -1,141 +0,0 @@ ---- -"Create index with mappings": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - type_1: {} - - - do: - indices.get_mapping: - include_type_name: true - index: test_index - - - is_true: test_index.mappings.type_1 - ---- -"Create index with settings": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - settings: - number_of_replicas: "0" - - - do: - indices.get_settings: - index: test_index - - - match: { test_index.settings.index.number_of_replicas: "0"} - ---- -"Create index": - - - do: - indices.create: - include_type_name: true - index: test_index - - - match: { acknowledged: true } - - match: { index: "test_index"} - ---- -"Create index with wait_for_active_shards set to all": - - - do: - indices.create: - include_type_name: true - index: test_index - wait_for_active_shards: all - body: - settings: - number_of_replicas: "0" - - - match: { acknowledged: true } - - match: { shards_acknowledged: true } - ---- -"Create index with aliases": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - type_1: - properties: - field: - type: text - aliases: - test_alias: {} - test_blias: - routing: b - test_clias: - filter: - term: - field : value - - - do: - indices.get_alias: - index: test_index - - - match: {test_index.aliases.test_blias.search_routing: b} - - match: {test_index.aliases.test_blias.index_routing: b} - - is_false: test_index.aliases.test_blias.filter - - match: {test_index.aliases.test_clias.filter.term.field: value} - - is_false: test_index.aliases.test_clias.index_routing - - is_false: test_index.aliases.test_clias.search_routing - ---- -"Create index with write aliases": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - aliases: - test_alias: {} - test_blias: - is_write_index: false - test_clias: - is_write_index: true - - - do: - indices.get_alias: - index: test_index - - - is_false: test_index.aliases.test_alias.is_write_index - - is_false: test_index.aliases.test_blias.is_write_index - - is_true: test_index.aliases.test_clias.is_write_index - ---- -"Create index with no type mappings": - - do: - catch: /illegal_argument_exception/ - indices.create: - include_type_name: true - index: test_index - body: - mappings: - "" : {} - ---- -"Create index with invalid mappings": - - do: - catch: /illegal_argument_exception/ - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - "": - type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml deleted file mode 100644 index e4fedae6d1aa..000000000000 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/20_mix_typeless_typeful.yml +++ /dev/null @@ -1,120 +0,0 @@ ---- -"Create a typeless index while there is a typed template": - - do: - indices.put_template: - include_type_name: true - name: test_template - body: - index_patterns: test-* - mappings: - my_type: - properties: - foo: - type: keyword - - - do: - indices.create: - index: test-1 - body: - mappings: - properties: - bar: - type: "long" - - - do: - indices.get_mapping: - include_type_name: true - index: test-1 - - - is_true: test-1.mappings._doc # the index creation call won - - is_false: test-1.mappings.my_type - - is_true: test-1.mappings._doc.properties.foo - - is_true: test-1.mappings._doc.properties.bar - ---- -"Create a typed index while there is a typeless template": - - do: - indices.put_template: - include_type_name: false - name: test_template - body: - index_patterns: test-* - mappings: - properties: - foo: - type: keyword - - - do: - indices.create: - include_type_name: true - index: test-1 - body: - mappings: - my_type: - properties: - bar: - type: "long" - - - do: - indices.get_mapping: - include_type_name: true - index: test-1 - - - is_true: test-1.mappings.my_type # the index creation call won - - is_false: test-1.mappings._doc - - is_true: test-1.mappings.my_type.properties.foo - - is_true: test-1.mappings.my_type.properties.bar - ---- -"Implicitly create a typed index while there is a typeless template": - - do: - indices.put_template: - include_type_name: false - name: test_template - body: - index_patterns: test-* - mappings: - properties: - foo: - type: keyword - - - do: - catch: /the final mapping would have more than 1 type/ - index: - index: test-1 - type: my_type - body: { bar: 42 } - ---- -"Implicitly create a typeless index while there is a typed template": - - do: - indices.put_template: - include_type_name: true - name: test_template - body: - index_patterns: test-* - mappings: - my_type: - properties: - foo: - type: keyword - - - do: - index: - index: test-1 - body: { bar: 42 } - -# ensures dynamic mapping update is visible to get_mapping - - do: - cluster.health: - wait_for_events: normal - - - do: - indices.get_mapping: - include_type_name: true - index: test-1 - - - is_true: test-1.mappings.my_type # the template is honored - - is_false: test-1.mappings._doc - - is_true: test-1.mappings.my_type.properties.foo - - is_true: test-1.mappings.my_type.properties.bar diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml deleted file mode 100644 index 0a7f5fa3560b..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/11_basic_with_types.yml +++ /dev/null @@ -1,83 +0,0 @@ ---- -setup: - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - ---- -"Get field mapping with no index and type": - - - do: - indices.get_field_mapping: - include_type_name: true - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - ---- -"Get field mapping by index only": - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - ---- -"Get field mapping by type & field": - - - do: - indices.get_field_mapping: - 
include_type_name: true - index: test_index - type: test_type - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - ---- -"Get field mapping by type & field, with another field that doesn't exist": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - type: test_type - fields: [ text , text1 ] - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - - is_false: test_index.mappings.test_type.text1 - ---- -"Get field mapping with include_defaults": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - type: test_type - fields: text - include_defaults: true - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - - match: {test_index.mappings.test_type.text.mapping.text.analyzer: default} - ---- -"Get field mapping should work without index specifying type and fields": - - - do: - indices.get_field_mapping: - include_type_name: true - type: test_type - fields: text - - - match: {test_index.mappings.test_type.text.mapping.text.type: text} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml deleted file mode 100644 index 264d187ebd22..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/21_missing_field_with_types.yml +++ /dev/null @@ -1,23 +0,0 @@ ---- -"Return empty object if field doesn't exist, but type and index do": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - type: test_type - fields: not_existent - - - match: { '': {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml deleted file mode 100644 index 0bf3f1f7823e..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yml +++ /dev/null @@ -1,22 +0,0 @@ ---- -"Raise 404 when type doesn't exist": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_field_mapping: - include_type_name: true - index: test_index - type: not_test_type - fields: text diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml deleted file mode 100644 index 68c183e9b292..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/51_field_wildcards_with_types.yml +++ /dev/null @@ -1,144 +0,0 @@ ---- -setup: - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - t1: - type: text - t2: - type: text - obj: - properties: - t1: - type: text - i_t1: - type: text - i_t3: - type: text - - - do: - indices.create: - include_type_name: true - index: test_index_2 - body: - mappings: - test_type_2: - properties: - t1: - type: text - t2: - type: text - obj: - properties: 
- t1: - type: text - i_t1: - type: text - i_t3: - type: text - ---- -"Get field mapping with * for fields": - - - do: - indices.get_field_mapping: - include_type_name: true - fields: "*" - - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 } - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 } - ---- -"Get field mapping with t* for fields": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - fields: "t*" - - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - ---- -"Get field mapping with *t1 for fields": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - fields: "*t1" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.obj\.t1.full_name: obj.t1 } - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - length: {test_index.mappings.test_type: 3} - ---- -"Get field mapping with wildcarded relative names": - - - do: - indices.get_field_mapping: - include_type_name: true - index: test_index - fields: "obj.i_*" - - match: {test_index.mappings.test_type.obj\.i_t1.full_name: obj.i_t1 } - - match: {test_index.mappings.test_type.obj\.i_t3.full_name: obj.i_t3 } - - length: {test_index.mappings.test_type: 2} - ---- -"Get field mapping should work using '_all' for indices and types": - - - do: - indices.get_field_mapping: - include_type_name: true - index: _all - type: _all - fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} - ---- -"Get field mapping should work using '*' for indices and types": - - - do: - indices.get_field_mapping: - include_type_name: true - index: '*' - type: '*' - fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} - ---- -"Get field mapping should work using comma_separated values for indices and types": - - - do: - indices.get_field_mapping: - include_type_name: true - index: 'test_index,test_index_2' - type: 'test_type,test_type_2' - fields: "t*" - - match: {test_index.mappings.test_type.t1.full_name: t1 } - - match: {test_index.mappings.test_type.t2.full_name: t2 } - - length: {test_index.mappings.test_type: 2} - - match: {test_index_2.mappings.test_type_2.t1.full_name: t1 } - - match: {test_index_2.mappings.test_type_2.t2.full_name: t2 } - - length: {test_index_2.mappings.test_type_2: 2} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml deleted file mode 100644 index 2b6433a3e98f..000000000000 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/60_mix_typeless_typeful.yml +++ /dev/null @@ -1,21 +0,0 @@ ---- -"GET mapping with typeless API on an index that has types": - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - - do: - indices.get_field_mapping: - include_type_name: false - index: index - fields: foo - - - match: { index.mappings.foo.mapping.foo.type: "keyword" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml deleted file mode 100644 index 598cc24f7806..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml +++ /dev/null @@ -1,158 +0,0 @@ ---- -setup: - - do: - indices.create: - include_type_name: true - index: test_1 - body: - mappings: - doc: {} - - do: - indices.create: - include_type_name: true - index: test_2 - body: - mappings: - doc: {} ---- -"Get /{index}/_mapping with empty mappings": - - - do: - indices.create: - index: t - - - do: - indices.get_mapping: - include_type_name: true - index: t - - - match: { t.mappings: {}} - ---- -"Get /_mapping": - - - do: - indices.get_mapping: - include_type_name: true - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /{index}/_mapping": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - - - is_true: test_1.mappings.doc - - is_false: test_2 - - ---- -"Get /{index}/_mapping/_all": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - type: _all - - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /{index}/_mapping/*": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - type: '*' - - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /{index}/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - type: doc - - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /{index}/_mapping/{type*}": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1 - type: 'd*' - - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /_all/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: _all - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /*/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: '*' - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /index,index/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: test_1,test_2 - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /index*/_mapping/{type}": - - - do: - indices.get_mapping: - include_type_name: true - index: '*2' - type: doc - - - is_true: test_2.mappings.doc - - is_false: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml deleted file mode 100644 
index f17fb6a59530..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/20_missing_type.yml +++ /dev/null @@ -1,106 +0,0 @@ ---- -"Non-existent type returns 404": - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_mapping: - include_type_name: true - index: test_index - type: not_test_type - - - match: { status: 404 } - - match: { error.reason: 'type[[not_test_type]] missing' } - ---- -"No type matching pattern returns 404": - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_mapping: - include_type_name: true - index: test_index - type: test*,not* - - - match: { status: 404 } - - match: { error: 'type [not*] missing' } - - is_true: test_index.mappings.test_type - ---- -"Existent and non-existent type returns 404 and the existing type": - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_mapping: - include_type_name: true - index: test_index - type: test_type,not_test_type - - - match: { status: 404 } - - match: { error: 'type [not_test_type] missing' } - - is_true: test_index.mappings.test_type - ---- -"Existent and non-existent types returns 404 and the existing type": - - do: - indices.create: - include_type_name: true - index: test_index - body: - mappings: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - catch: missing - indices.get_mapping: - include_type_name: true - index: test_index - type: test_type,not_test_type,another_not_test_type - - - match: { status: 404 } - - match: { error: 'types [another_not_test_type,not_test_type] missing' } - - is_true: test_index.mappings.test_type - ---- -"Type missing when no types exist": - - do: - catch: missing - indices.get_mapping: - include_type_name: true - type: not_test_type diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml index 15a52b7b2db2..956b80ce16b5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/40_aliases.yml @@ -18,7 +18,6 @@ - do: indices.get_mapping: - include_type_name: false index: test_alias - match: {test_index.mappings.properties.text.type: text} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml deleted file mode 100644 index 6da7f4a2c694..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/61_empty_with_types.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -setup: - - - do: - indices.create: - index: test_1 - - - do: - indices.create: - index: test_2 - ---- -"Check empty mapping when getting all mappings via /_mapping": - - - do: - indices.get_mapping: - include_type_name: true - - - match: { test_1.mappings: {}} - - match: { test_2.mappings: {}} diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml deleted file mode 100644 index 6a520c82aad2..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/70_mix_typeless_typeful.yml +++ /dev/null @@ -1,19 +0,0 @@ ---- -"GET mapping with typeless API on an index that has types": - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - - do: - indices.get_mapping: - index: index - - - match: { index.mappings.properties.foo.type: "keyword" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index 959e08d75dab..4b228ac0ecdb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -64,6 +64,9 @@ --- "Put mappings with explicit _doc type": + - skip: + version: " - 7.99.99" + reason: "deprecation message changed in 8.0" - do: indices.create: index: test_index @@ -78,5 +81,30 @@ field: type: keyword + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "Types cannot be provided in put mapping requests" } + +--- +"Put mappings with explicit _doc type bwc": + - skip: + version: "8.0.0 - " + reason: "old deprecation message for pre 8.0" + features: "node_selector" + - do: + indices.create: + index: test_index + + - do: + node_selector: + version: " - 7.99.99" + catch: bad_request + indices.put_mapping: + index: test_index + body: + _doc: + properties: + field: + type: keyword + - match: { error.type: "illegal_argument_exception" } - match: { error.reason: "Types cannot be provided in put mapping requests, unless the include_type_name parameter is set to true." 
} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml deleted file mode 100644 index 5da9cd4bf707..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml +++ /dev/null @@ -1,74 +0,0 @@ ---- -"Test Create and update mapping": - - do: - indices.create: - index: test_index - - - do: - indices.put_mapping: - include_type_name: true - index: test_index - type: test_type - body: - test_type: - properties: - text1: - type: text - analyzer: whitespace - text2: - type: text - analyzer: whitespace - subfield.text3: - type: text - - - do: - indices.get_mapping: - include_type_name: true - index: test_index - - - match: {test_index.mappings.test_type.properties.text1.type: text} - - match: {test_index.mappings.test_type.properties.text1.analyzer: whitespace} - - match: {test_index.mappings.test_type.properties.text2.type: text} - - match: {test_index.mappings.test_type.properties.text2.analyzer: whitespace} - - - do: - indices.put_mapping: - include_type_name: true - index: test_index - type: test_type - body: - test_type: - properties: - text1: - type: text - analyzer: whitespace - fields: - text_raw: - type: keyword - - - - do: - indices.get_mapping: - include_type_name: true - index: test_index - - - match: {test_index.mappings.test_type.properties.text1.type: text} - - match: {test_index.mappings.test_type.properties.subfield.properties.text3.type: text} - - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword} - ---- -"Create index with invalid mappings": - - do: - indices.create: - index: test_index - - do: - catch: /illegal_argument_exception/ - indices.put_mapping: - include_type_name: true - index: test_index - type: test_type - body: - test_type: - properties: - "": - type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml deleted file mode 100644 index e91ea9210d89..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_mix_typeless_typeful.yml +++ /dev/null @@ -1,77 +0,0 @@ ---- -"PUT mapping with typeless API on an index that has types": - - - do: - indices.create: # not using include_type_name: false on purpose - include_type_name: true - index: index - body: - mappings: - not_doc: - properties: - foo: - type: "keyword" - - - do: - indices.put_mapping: - include_type_name: false - index: index - body: - properties: - bar: - type: "long" - - - do: - indices.get_mapping: - include_type_name: false - index: index - - - match: { index.mappings.properties.foo.type: "keyword" } - - match: { index.mappings.properties.bar.type: "long" } - - - do: - indices.put_mapping: - include_type_name: false - index: index - body: - properties: - foo: - type: "keyword" # also test no-op updates that trigger special logic wrt the mapping version - - - do: - catch: /the final mapping would have more than 1 type/ - indices.put_mapping: - include_type_name: true - index: index - type: some_other_type - body: - some_other_type: - properties: - bar: - type: "long" - - ---- -"PUT mapping with _doc on an index that has types": - - do: - indices.create: - include_type_name: true - index: index - body: - mappings: - my_type: - properties: - foo: - type: "keyword" - - 
- do: - catch: /the final mapping would have more than 1 type/ - indices.put_mapping: - include_type_name: true - index: index - type: _doc - body: - _doc: - properties: - bar: - type: "long" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml deleted file mode 100644 index 6f9b6f7d9cee..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml +++ /dev/null @@ -1,227 +0,0 @@ -setup: - - do: - indices.create: - index: test_index1 - - do: - indices.create: - index: test_index2 - - do: - indices.create: - index: foo - - ---- -"put one mapping per index": - - do: - indices.put_mapping: - include_type_name: true - index: test_index1 - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - do: - indices.put_mapping: - include_type_name: true - index: test_index2 - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: { foo.mappings: {} } - ---- -"put mapping in _all index": - - - do: - indices.put_mapping: - include_type_name: true - index: _all - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} - ---- -"put mapping in * index": - - do: - indices.put_mapping: - include_type_name: true - index: "*" - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} - ---- -"put mapping in prefix* index": - - do: - indices.put_mapping: - include_type_name: true - index: "test_index*" - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - 
- match: { foo.mappings: {} } - ---- -"put mapping in list of indices": - - do: - indices.put_mapping: - include_type_name: true - index: [test_index1, test_index2] - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: { foo.mappings: {} } - ---- -"put mapping with blank index": - - do: - indices.put_mapping: - include_type_name: true - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} - ---- -"put mapping with missing type": - - - - do: - catch: param - indices.put_mapping: - include_type_name: true - ---- -"post a mapping with default analyzer twice": - - - do: - indices.put_mapping: - include_type_name: true - index: test_index1 - type: test_type - body: - test_type: - dynamic: false - properties: - text: - analyzer: default - type: text - - - do: - indices.put_mapping: - include_type_name: true - index: test_index1 - type: test_type - body: - test_type: - dynamic: false - properties: - text: - analyzer: default - type: text - - - do: - indices.get_mapping: - include_type_name: true - - - match: {test_index1.mappings.test_type.properties.text.type: text} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/51_filter_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/51_filter_with_types.yml deleted file mode 100644 index 3405e6390476..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/51_filter_with_types.yml +++ /dev/null @@ -1,57 +0,0 @@ -setup: - - do: - indices.create: - include_type_name: true - index: test - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - test: - properties: - mentions: - type: keyword - notifications: - type: keyword - - - do: - index: - index: test - type: test - id: foo|bar|baz0 - body: { "notifications" : ["abc"] } - - - do: - index: - index: test - type: test - id: foo|bar|baz1 - body: { "mentions" : ["abc"] } - - - do: - indices.refresh: {} - ---- -"Filter aggs with terms lookup and ensure it's cached": - # Because the filter agg rewrites the terms lookup in the rewrite phase the request can be cached - - - do: - search: - rest_total_hits_as_int: true - size: 0 - request_cache: true - body: {"aggs": { "itemsNotify": { "filter": { "terms": { "mentions": { "index": "test", "type": "test", "id": "foo|bar|baz0", "path": "notifications"}}}, "aggs": { "mentions" : {"terms" : { "field" : "mentions" }}}}}} - - # validate result - - match: { hits.total: 2 } - - match: { aggregations.itemsNotify.doc_count: 1 } - - length: { aggregations.itemsNotify.mentions.buckets: 1 } - - 
match: { aggregations.itemsNotify.mentions.buckets.0.key: "abc" } - # we are using a lookup - this should not cache - - do: - indices.stats: { index: test, metric: request_cache} - - match: { _shards.total: 1 } - - match: { _all.total.request_cache.hit_count: 0 } - - match: { _all.total.request_cache.miss_count: 1 } - - is_true: indices.test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml index 7a3de2005c72..cec9689e4164 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml @@ -28,10 +28,8 @@ setup: body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : {} } } } - match: { hits.total: 1 } - match: { hits.hits.0._index: "test" } - - match: { hits.hits.0._type: "_doc" } - match: { hits.hits.0._id: "1" } - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._index: "test" } - - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._type: "_doc" } - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._id: "1" } - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.field: "nested_field" } - match: { hits.hits.0.inner_hits.nested_field.hits.hits.0._nested.offset: 0 } @@ -57,7 +55,6 @@ setup: - match: { hits.total: 1 } - match: { hits.hits.0._index: "test" } - - match: { hits.hits.0._type: "_doc" } - match: { hits.hits.0._id: "1" } - match: { hits.hits.0._version: 1 } - match: { hits.hits.0.fields._seq_no: [0] } @@ -80,7 +77,6 @@ setup: - match: { hits.total: 1 } - match: { hits.hits.0._index: "test" } - - match: { hits.hits.0._type: "_doc" } - match: { hits.hits.0._id: "1" } - match: { hits.hits.0._version: 2 } - match: { hits.hits.0.fields._seq_no: [1] } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml index a82d7fff480e..d2933a44e586 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/100_stored_fields.yml @@ -19,7 +19,6 @@ setup: index: test - is_true: hits.hits.0._id - - is_true: hits.hits.0._type - is_true: hits.hits.0._source - do: @@ -30,7 +29,6 @@ setup: stored_fields: [] - is_true: hits.hits.0._id - - is_true: hits.hits.0._type - is_false: hits.hits.0._source - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml index a40ffe94d4ab..5ebbb23cbd31 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml @@ -67,19 +67,16 @@ setup: - match: {hits.total: 6 } - length: {hits.hits: 3 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0.fields.numeric_group: [3] } - match: {hits.hits.0.sort: [36] } - match: {hits.hits.0._id: "6" } - is_false: hits.hits.0.inner_hits - match: {hits.hits.1._index: test } - - match: {hits.hits.1._type: _doc } - match: {hits.hits.1.fields.numeric_group: [1] } - match: {hits.hits.1.sort: [24] } - match: {hits.hits.1._id: "3" } - is_false: hits.hits.1.inner_hits - match: {hits.hits.2._index: test } - 
- match: {hits.hits.2._type: _doc } - match: {hits.hits.2.fields.numeric_group: [25] } - match: {hits.hits.2.sort: [10] } - match: {hits.hits.2._id: "4" } @@ -100,7 +97,6 @@ setup: - match: {hits.total: 6 } - length: {hits.hits: 1 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0.fields.numeric_group: [25]} - match: {hits.hits.0.sort: [10] } - match: {hits.hits.0._id: "4" } @@ -120,7 +116,6 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 3 } - match: { hits.hits.0._index: test } - - match: { hits.hits.0._type: _doc } - match: { hits.hits.0.fields.numeric_group: [3] } - match: { hits.hits.0.sort: [36] } - match: { hits.hits.0._id: "6" } @@ -128,7 +123,6 @@ setup: - length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 } - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" } - match: { hits.hits.1._index: test } - - match: { hits.hits.1._type: _doc } - match: { hits.hits.1.fields.numeric_group: [1] } - match: { hits.hits.1.sort: [24] } - match: { hits.hits.1._id: "3" } @@ -137,7 +131,6 @@ setup: - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" } - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } @@ -160,7 +153,6 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 3 } - match: { hits.hits.0._index: test } - - match: { hits.hits.0._type: _doc } - match: { hits.hits.0.fields.numeric_group: [3] } - match: { hits.hits.0.sort: [36] } - match: { hits.hits.0._id: "6" } @@ -168,7 +160,6 @@ setup: - length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 } - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" } - match: { hits.hits.1._index: test } - - match: { hits.hits.1._type: _doc } - match: { hits.hits.1.fields.numeric_group: [1] } - match: { hits.hits.1.sort: [24] } - match: { hits.hits.1._id: "3" } @@ -177,7 +168,6 @@ setup: - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" } - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } @@ -277,7 +267,6 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 3 } - match: { hits.hits.0._index: test } - - match: { hits.hits.0._type: _doc } - match: { hits.hits.0.fields.numeric_group: [3] } - match: { hits.hits.0.sort: [36] } - match: { hits.hits.0._id: "6" } @@ -288,7 +277,6 @@ setup: - length: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits: 1 } - match: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits.0._id: "6" } - match: { hits.hits.1._index: test } - - match: { hits.hits.1._type: _doc } - match: { hits.hits.1.fields.numeric_group: [1] } - match: { hits.hits.1.sort: [24] } - match: { hits.hits.1._id: "3" } @@ -300,7 +288,6 @@ setup: - length: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits: 1 } - match: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits.0._id: "3" } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } @@ -327,7 +314,6 @@ setup: - match: { hits.total: 6 } - length: { hits.hits: 3 } - match: { hits.hits.0._index: test } - - match: { hits.hits.0._type: 
_doc } - match: { hits.hits.0.fields.numeric_group: [3] } - match: { hits.hits.0.sort: [36] } - match: { hits.hits.0._id: "6" } @@ -337,7 +323,6 @@ setup: - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" } - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._version: 66 } - match: { hits.hits.1._index: test } - - match: { hits.hits.1._type: _doc } - match: { hits.hits.1.fields.numeric_group: [1] } - match: { hits.hits.1.sort: [24] } - match: { hits.hits.1._id: "3" } @@ -349,7 +334,6 @@ setup: - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" } - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._version: 11 } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } @@ -366,7 +350,6 @@ setup: - do: indices.put_mapping: - include_type_name: false index: test body: properties: @@ -426,7 +409,6 @@ setup: - gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._seq_no: 0 } - gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._primary_term: 1 } - match: { hits.hits.2._index: test } - - match: { hits.hits.2._type: _doc } - match: { hits.hits.2.fields.numeric_group: [25] } - match: { hits.hits.2.sort: [10] } - match: { hits.hits.2._id: "4" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml index 7724fdc8c067..84ec1ebee4b7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml @@ -35,7 +35,7 @@ search: rest_total_hits_as_int: true index: "search_index" - body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type": "_doc", "id": "1", "path": "followers"} } } } + body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "id": "1", "path": "followers"} } } } - do: indices.create: index: lookup_index @@ -58,7 +58,7 @@ search: rest_total_hits_as_int: true index: "search_index" - body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type": "_doc", "id": "1", "path": "followers"} } } } + body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "id": "1", "path": "followers"} } } } - match: { _shards.total: 5 } - match: { _shards.successful: 5 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml deleted file mode 100644 index 6c99cce0fe46..000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/171_terms_query_with_types.yml +++ /dev/null @@ -1,58 +0,0 @@ ---- -"Terms Query with No.of terms exceeding index.max_terms_count should FAIL": - - - do: - indices.create: - include_type_name: true - index: test_index - body: - settings: - number_of_shards: 1 - index.max_terms_count: 2 - mappings: - test_type: - properties: - user: - type: keyword - followers: - type: keyword - - do: - bulk: - refresh: true - body: - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u1"}}' - - '{"user": "u1", "followers": ["u2", "u3"]}' - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u2"}}' - - '{"user": "u2", "followers": ["u1", "u3", "u4"]}' - - '{"index": {"_index": 
"test_index", "_type": "test_type", "_id": "u3"}}' - - '{"user": "u3", "followers": ["u1"]}' - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "u4"}}' - - '{"user": "u4", "followers": ["u3"]}' - - - do: - search: - rest_total_hits_as_int: true - index: test_index - body: {"query" : {"terms" : {"user" : ["u1", "u2"]}}} - - match: { hits.total: 2 } - - - do: - catch: bad_request - search: - rest_total_hits_as_int: true - index: test_index - body: {"query" : {"terms" : {"user" : ["u1", "u2", "u3"]}}} - - - do: - search: - rest_total_hits_as_int: true - index: test_index - body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u1", "path" : "followers"}}}} - - match: { hits.total: 2 } - - - do: - catch: bad_request - search: - rest_total_hits_as_int: true - index: test_index - body: {"query" : {"terms" : {"user" : {"index" : "test_index", "type" : "test_type", "id" : "u2", "path" : "followers"}}}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml index 7ee665a47a6e..b2ec345a6fe8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/20_default_values.yml @@ -46,7 +46,6 @@ setup: - match: {hits.total: 1} - match: {hits.hits.0._index: test_1 } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "1" } - do: @@ -60,7 +59,6 @@ setup: - match: {hits.total: 1} - match: {hits.hits.0._index: test_2 } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "42" } --- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml index d306cb7b1ad5..e38f5f862a27 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/70_response_filtering.yml @@ -31,10 +31,8 @@ - is_true: _shards.total - is_true: hits.total - is_true: hits.hits.0._index - - is_true: hits.hits.0._type - is_true: hits.hits.0._id - is_true: hits.hits.1._index - - is_true: hits.hits.1._type - is_true: hits.hits.1._id - do: @@ -48,10 +46,8 @@ - is_false: _shards.total - is_false: hits.total - is_false: hits.hits.0._index - - is_false: hits.hits.0._type - is_false: hits.hits.0._id - is_false: hits.hits.1._index - - is_false: hits.hits.1._type - is_false: hits.hits.1._id - do: @@ -65,10 +61,8 @@ - is_true: _shards.total - is_false: hits.total - is_false: hits.hits.0._index - - is_false: hits.hits.0._type - is_false: hits.hits.0._id - is_false: hits.hits.1._index - - is_false: hits.hits.1._type - is_false: hits.hits.1._id - do: @@ -82,10 +76,8 @@ - is_true: _shards.total - is_true: hits.total - is_true: hits.hits.0._index - - is_false: hits.hits.0._type - is_true: hits.hits.0._id - is_true: hits.hits.1._index - - is_false: hits.hits.1._type - is_true: hits.hits.1._id --- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml index be7a45d75129..ff001056957b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml @@ -41,7 +41,6 @@ setup: - match: {hits.total: 3 } - length: 
{hits.hits: 1 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "172" } - match: {hits.hits.0.sort: [24, "172"] } @@ -60,7 +59,6 @@ setup: - match: {hits.total: 3 } - length: {hits.hits: 1 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "42" } - match: {hits.hits.0.sort: [18, "42"] } @@ -79,7 +77,6 @@ setup: - match: {hits.total: 3} - length: {hits.hits: 1 } - match: {hits.hits.0._index: test } - - match: {hits.hits.0._type: _doc } - match: {hits.hits.0._id: "1" } - match: {hits.hits.0.sort: [18, "1"] } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml index de28dc7f16ba..ff6ecfb0c34c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/20_completion.yml @@ -281,12 +281,10 @@ setup: - length: { suggest.result.0.options: 2 } - match: { suggest.result.0.options.0.text: "baz" } - match: { suggest.result.0.options.0._index: "test" } - - match: { suggest.result.0.options.0._type: "_doc" } - match: { suggest.result.0.options.0._source.title: "title_baz" } - match: { suggest.result.0.options.0._source.count: 3 } - match: { suggest.result.0.options.1.text: "bar" } - match: { suggest.result.0.options.1._index: "test" } - - match: { suggest.result.0.options.1._type: "_doc" } - match: { suggest.result.0.options.1._source.title: "title_bar" } - match: { suggest.result.0.options.1._source.count: 4 } diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index d15ab1734bee..821686e8894f 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.transport.TcpTransport; @@ -283,6 +284,10 @@ public class ElasticsearchException extends RuntimeException implements ToXConte public static ElasticsearchException readException(StreamInput input, int id) throws IOException { CheckedFunction<StreamInput, ? extends ElasticsearchException, IOException> elasticsearchException = ID_TO_SUPPLIER.get(id); if (elasticsearchException == null) { + if (id == 127 && input.getVersion().before(Version.V_7_5_0)) { + // was SearchContextException + return new SearchException(input); + } throw new IllegalStateException("unknown exception for id: " + id); } return elasticsearchException.apply(input); @@ -965,8 +970,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte TcpTransport.HttpRequestOnTransportException::new, 125, UNKNOWN_VERSION_ADDED), MAPPER_PARSING_EXCEPTION(org.elasticsearch.index.mapper.MapperParsingException.class, org.elasticsearch.index.mapper.MapperParsingException::new, 126, UNKNOWN_VERSION_ADDED), - SEARCH_CONTEXT_EXCEPTION(org.elasticsearch.search.SearchContextException.class, - org.elasticsearch.search.SearchContextException::new, 127, UNKNOWN_VERSION_ADDED), + // 127 used to be org.elasticsearch.search.SearchContextException
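The change above retires wire ID 127 instead of reusing it: nodes on versions before 7.5.0 can still send a SearchContextException under that ID, and it is now decoded as its parent SearchException, which shares the same wire format. Below is a minimal, self-contained sketch of that reserved-ID pattern; all names are illustrative stand-ins, not the real Elasticsearch stream API.

```java
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Sketch of a wire registry in which a retired ID is never recycled: a lookup
// miss falls back to a parent decoder for streams from old-enough senders.
public class ExceptionRegistrySketch {
    static final int RETIRED_ID = 127;        // was SearchContextException
    static final int V_7_5_0 = 7_05_00_99;    // illustrative version constant

    static final Map<Integer, Function<byte[], Exception>> REGISTRY = new HashMap<>();

    static Exception readException(byte[] payload, int id, int senderVersion) {
        Function<byte[], Exception> reader = REGISTRY.get(id);
        if (reader == null) {
            if (id == RETIRED_ID && senderVersion < V_7_5_0) {
                // Pre-7.5 senders still serialize the removed subclass under 127;
                // decode it as the surviving parent, which shares the wire format.
                return new RuntimeException(new String(payload, StandardCharsets.UTF_8));
            }
            throw new IllegalStateException("unknown exception for id: " + id);
        }
        return reader.apply(payload);
    }
}
```

Keeping retired IDs out of circulation means a registry miss is always a genuine protocol error rather than a collision with a recycled slot.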
SEARCH_SOURCE_BUILDER_EXCEPTION(org.elasticsearch.search.builder.SearchSourceBuilderException.class, org.elasticsearch.search.builder.SearchSourceBuilderException::new, 128, UNKNOWN_VERSION_ADDED), // 129 was EngineClosedException diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java index a63f2cf81047..b16959e9d18d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java @@ -53,7 +53,7 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent CreateSnapshotResponse() {} - CreateSnapshotResponse(@Nullable SnapshotInfo snapshotInfo) { + public CreateSnapshotResponse(@Nullable SnapshotInfo snapshotInfo) { this.snapshotInfo = snapshotInfo; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java index 27e9a2a33e59..c687f31d52cf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -64,6 +65,7 @@ public class SnapshotStats implements Writeable, ToXContentObject { long incrementalSize, long totalSize, long processedSize) { this.startTime = startTime; this.time = time; + assert time >= 0 : "Tried to initialize snapshot stats with negative total time [" + time + "]"; this.incrementalFileCount = incrementalFileCount; this.totalFileCount = totalFileCount; this.processedFileCount = processedFileCount; @@ -315,6 +317,8 @@ public class SnapshotStats implements Writeable, ToXContentObject { // Update duration time = endTime - startTime; } + assert time >= 0 + : "Update with [" + Strings.toString(stats) + "][" + updateTimestamps + "] resulted in negative total time [" + time + "]"; } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index b224b0913060..4723775c6f7e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -101,6 +101,7 @@ public class SnapshotStatus implements ToXContentObject, Writeable { this.shards = Objects.requireNonNull(shards); this.includeGlobalState = includeGlobalState; shardsStats = new SnapshotShardsStats(shards); + assert time >= 0 : "time must be >= 0 but received [" + time + "]"; updateShardStats(startTime, time); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 4cfc1bafe33b..bf1e2bef6343 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -243,9 +243,14 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeAction<SnapshotsStatusRequest, SnapshotsStatusResponse> final long startTime = snapshotInfo.startTime(); + final long endTime = snapshotInfo.endTime(); + assert endTime >= startTime || (endTime == 0L && snapshotInfo.state().completed() == false) + : "Inconsistent timestamps found in SnapshotInfo [" + snapshotInfo + "]"; builder.add(new SnapshotStatus(new Snapshot(repositoryName, snapshotId), state, Collections.unmodifiableList(shardStatusBuilder), snapshotInfo.includeGlobalState(), - startTime, snapshotInfo.endTime() - startTime)); + startTime, + // Use current time to calculate overall runtime for in-progress snapshots that have endTime == 0 + (endTime == 0 ? threadPool.absoluteTimeInMillis() : endTime) - startTime)); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java index 050bb7b5d8ba..97223230dfa6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ShardValidateQueryRequest.java @@ -19,8 +19,8 @@ package org.elasticsearch.action.admin.indices.validate.query; +import org.elasticsearch.Version; import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; @@ -36,7 +36,6 @@ import java.util.Objects; public class ShardValidateQueryRequest extends BroadcastShardRequest { private QueryBuilder query; - private String[] types = Strings.EMPTY_ARRAY; private boolean explain; private boolean rewrite; private long nowInMillis; @@ -45,12 +44,12 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { public ShardValidateQueryRequest(StreamInput in) throws IOException { super(in); query = in.readNamedWriteable(QueryBuilder.class); - - int typesSize = in.readVInt(); - if (typesSize > 0) { - types = new String[typesSize]; - for (int i = 0; i < typesSize; i++) { - types[i] = in.readString(); + if (in.getVersion().before(Version.V_8_0_0)) { + int typesSize = in.readVInt(); + if (typesSize > 0) { + for (int i = 0; i < typesSize; i++) { + in.readString(); + } } } filteringAliases = new AliasFilter(in); @@ -62,7 +61,6 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { public ShardValidateQueryRequest(ShardId shardId, AliasFilter filteringAliases, ValidateQueryRequest request) { super(shardId, request); this.query = request.query(); - this.types = request.types(); this.explain = request.explain(); this.rewrite = request.rewrite(); this.filteringAliases = Objects.requireNonNull(filteringAliases, "filteringAliases must not be null"); @@ -73,10 +71,6 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { return query; } - public String[] types() { - return this.types; - } - public boolean explain() { return this.explain; } @@ -97,9 +91,8 @@ public class ShardValidateQueryRequest extends BroadcastShardRequest { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeNamedWriteable(query); - out.writeVInt(types.length); - for
(String type : types) { - out.writeString(type); + if (out.getVersion().before(Version.V_8_0_0)) { + out.writeVInt(0); // no types to filter } filteringAliases.writeTo(out); out.writeBoolean(explain); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 498f2d6e7b29..89b3bcb7789b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -46,7 +46,7 @@ import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -194,7 +194,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction< boolean valid; String explanation = null; String error = null; - ShardSearchLocalRequest shardSearchLocalRequest = new ShardSearchLocalRequest(request.shardId(), + ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(request.shardId(), request.nowInMillis(), request.filteringAliases()); SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT); try { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 06965beb44df..64a05adb1348 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.validate.query; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.IndicesOptions; @@ -47,8 +48,6 @@ public class ValidateQueryRequest extends BroadcastRequest private boolean rewrite; private boolean allShards; - private String[] types = Strings.EMPTY_ARRAY; - long nowInMillis; public ValidateQueryRequest() { @@ -58,11 +57,12 @@ public class ValidateQueryRequest extends BroadcastRequest public ValidateQueryRequest(StreamInput in) throws IOException { super(in); query = in.readNamedWriteable(QueryBuilder.class); - int typesSize = in.readVInt(); - if (typesSize > 0) { - types = new String[typesSize]; - for (int i = 0; i < typesSize; i++) { - types[i] = in.readString(); + if (in.getVersion().before(Version.V_8_0_0)) { + int typesSize = in.readVInt(); + if (typesSize > 0) { + for (int i = 0; i < typesSize; i++) { + in.readString(); + } } } explain = in.readBoolean(); @@ -100,29 +100,6 @@ public class ValidateQueryRequest extends BroadcastRequest return this; } - /** - * The types of documents the query will run against. Defaults to all types. - * - * @deprecated Types are in the process of being removed. 
Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public String[] types() { - return this.types; - } - - /** - * The types of documents the query will run against. Defaults to all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public ValidateQueryRequest types(String... types) { - this.types = types; - return this; - } - /** * Indicate if detailed information about query is requested */ @@ -169,9 +146,8 @@ public class ValidateQueryRequest extends BroadcastRequest public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeNamedWriteable(query); - out.writeVInt(types.length); - for (String type : types) { - out.writeString(type); + if (out.getVersion().before(Version.V_8_0_0)) { + out.writeVInt(0); // no types to filter } out.writeBoolean(explain); out.writeBoolean(rewrite); @@ -180,7 +156,7 @@ public class ValidateQueryRequest extends BroadcastRequest @Override public String toString() { - return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", query[" + query + "], explain:" + explain + + return "[" + Arrays.toString(indices) + "] query[" + query + "], explain:" + explain + ", rewrite:" + rewrite + ", all_shards:" + allShards; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java index bf34f8b27b4f..4abf346629de 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java @@ -30,14 +30,6 @@ public class ValidateQueryRequestBuilder super(client, action, new ValidateQueryRequest()); } - /** - * The types of documents the query will run against. Defaults to all types. - */ - public ValidateQueryRequestBuilder setTypes(String... types) { - request.types(types); - return this; - } - /** * The query to validate. 
* diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 371def78898d..8b305bc980bd 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -56,7 +56,6 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -159,115 +158,13 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, BulkResponse> - final ImmutableOpenMap<String, IndexMetaData> indicesMetaData = metaData.indices(); for (DocWriteRequest<?> actionRequest : bulkRequest.requests) { IndexRequest indexRequest = getIndexWriteRequest(actionRequest); - if (indexRequest != null) { - if (indexRequest.isPipelineResolved() == false) { - final String requestPipeline = indexRequest.getPipeline(); - indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); - boolean requestCanOverridePipeline = true; - String requiredPipeline = null; - // start to look for default or required pipelines via settings found in the index meta data - IndexMetaData indexMetaData = indicesMetaData.get(actionRequest.index()); - // check the alias for the index request (this is how normal index requests are modeled) - if (indexMetaData == null && indexRequest.index() != null) { - AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(indexRequest.index()); - if (indexOrAlias != null && indexOrAlias.isAlias()) { - AliasOrIndex.Alias alias = (AliasOrIndex.Alias) indexOrAlias; - indexMetaData = alias.getWriteIndex(); - } - } - // check the alias for the action request (this is how upserts are modeled) - if (indexMetaData == null && actionRequest.index() != null) { - AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(actionRequest.index()); - if (indexOrAlias != null && indexOrAlias.isAlias()) { - AliasOrIndex.Alias alias = (AliasOrIndex.Alias) indexOrAlias; - indexMetaData = alias.getWriteIndex(); - } - } - if (indexMetaData != null) { - final Settings indexSettings = indexMetaData.getSettings(); - if (IndexSettings.REQUIRED_PIPELINE.exists(indexSettings)) { - // find the required pipeline if one is defined from an existing index - requiredPipeline = IndexSettings.REQUIRED_PIPELINE.get(indexSettings); - assert IndexSettings.DEFAULT_PIPELINE.get(indexSettings).equals(IngestService.NOOP_PIPELINE_NAME) : - IndexSettings.DEFAULT_PIPELINE.get(indexSettings); - indexRequest.setPipeline(requiredPipeline); - requestCanOverridePipeline = false; - } else { - // find the default pipeline if one is defined from an existing index - String defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(indexSettings); - indexRequest.setPipeline(defaultPipeline); - } - } else if (indexRequest.index() != null) { - // the index does not exist yet (and is valid request), so match index templates to look for a default pipeline - List<IndexTemplateMetaData> templates = MetaDataIndexTemplateService.findTemplates(metaData, indexRequest.index()); - assert (templates != null); - // order of templates are highest order first, we have to iterate through them all though - String defaultPipeline = null; - for (IndexTemplateMetaData
template : templates) { - final Settings settings = template.settings(); - if (requiredPipeline == null && IndexSettings.REQUIRED_PIPELINE.exists(settings)) { - requiredPipeline = IndexSettings.REQUIRED_PIPELINE.get(settings); - requestCanOverridePipeline = false; - // we can not break in case a lower-order template has a default pipeline that we need to reject - } else if (defaultPipeline == null && IndexSettings.DEFAULT_PIPELINE.exists(settings)) { - defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(settings); - // we can not break in case a lower-order template has a required pipeline that we need to reject - } - } - if (requiredPipeline != null && defaultPipeline != null) { - // we can not have picked up a required and a default pipeline from applying templates - final String message = String.format( - Locale.ROOT, - "required pipeline [%s] and default pipeline [%s] can not both be set", - requiredPipeline, - defaultPipeline); - throw new IllegalArgumentException(message); - } - final String pipeline; - if (requiredPipeline != null) { - pipeline = requiredPipeline; - } else { - pipeline = Objects.requireNonNullElse(defaultPipeline, IngestService.NOOP_PIPELINE_NAME); - } - indexRequest.setPipeline(pipeline); - } - - if (requestPipeline != null) { - if (requestCanOverridePipeline == false) { - final String message = String.format( - Locale.ROOT, - "request pipeline [%s] can not override required pipeline [%s]", - requestPipeline, - requiredPipeline); - throw new IllegalArgumentException(message); - } else { - indexRequest.setPipeline(requestPipeline); - } - } - - if (IngestService.NOOP_PIPELINE_NAME.equals(indexRequest.getPipeline()) == false) { - hasIndexRequestsWithPipelines = true; - } - /* - * We have to track whether or not the pipeline for this request has already been resolved. It can happen that the - * pipeline for this request has already been derived yet we execute this loop again. That occurs if the bulk request - * has been forwarded by a non-ingest coordinating node to an ingest node. In this case, the coordinating node will have - * already resolved the pipeline for this request. It is important that we are able to distinguish this situation as we - * can not double-resolve the pipeline because we will not be able to distinguish the case of the pipeline having been - * set from a request pipeline parameter versus having been set by the resolution. We need to be able to distinguish - * these cases as we need to reject the request if the pipeline was set by a required pipeline and there is a request - * pipeline parameter too. 
- */ - indexRequest.isPipelineResolved(true); - } else if (IngestService.NOOP_PIPELINE_NAME.equals(indexRequest.getPipeline()) == false) { - hasIndexRequestsWithPipelines = true; - } + // Each index request needs to be evaluated, because this method also modifies the IndexRequest + boolean indexRequestHasPipeline = resolveRequiredOrDefaultPipeline(actionRequest, indexRequest, metaData); + hasIndexRequestsWithPipelines |= indexRequestHasPipeline; } - } if (hasIndexRequestsWithPipelines) { @@ -363,6 +260,112 @@ public class TransportBulkAction extends HandledTransportAction originalRequest, + IndexRequest indexRequest, + MetaData metaData) { + + if (indexRequest.isPipelineResolved() == false) { + final String requestPipeline = indexRequest.getPipeline(); + indexRequest.setPipeline(IngestService.NOOP_PIPELINE_NAME); + boolean requestCanOverridePipeline = true; + String requiredPipeline = null; + // start to look for default or required pipelines via settings found in the index meta data + IndexMetaData indexMetaData = metaData.indices().get(originalRequest.index()); + // check the alias for the index request (this is how normal index requests are modeled) + if (indexMetaData == null && indexRequest.index() != null) { + AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(indexRequest.index()); + if (indexOrAlias != null && indexOrAlias.isAlias()) { + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) indexOrAlias; + indexMetaData = alias.getWriteIndex(); + } + } + // check the alias for the action request (this is how upserts are modeled) + if (indexMetaData == null && originalRequest.index() != null) { + AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(originalRequest.index()); + if (indexOrAlias != null && indexOrAlias.isAlias()) { + AliasOrIndex.Alias alias = (AliasOrIndex.Alias) indexOrAlias; + indexMetaData = alias.getWriteIndex(); + } + } + if (indexMetaData != null) { + final Settings indexSettings = indexMetaData.getSettings(); + if (IndexSettings.REQUIRED_PIPELINE.exists(indexSettings)) { + // find the required pipeline if one is defined from an existing index + requiredPipeline = IndexSettings.REQUIRED_PIPELINE.get(indexSettings); + assert IndexSettings.DEFAULT_PIPELINE.get(indexSettings).equals(IngestService.NOOP_PIPELINE_NAME) : + IndexSettings.DEFAULT_PIPELINE.get(indexSettings); + indexRequest.setPipeline(requiredPipeline); + requestCanOverridePipeline = false; + } else { + // find the default pipeline if one is defined from an existing index + String defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(indexSettings); + indexRequest.setPipeline(defaultPipeline); + } + } else if (indexRequest.index() != null) { + // the index does not exist yet (and is valid request), so match index templates to look for a default pipeline + List templates = MetaDataIndexTemplateService.findTemplates(metaData, indexRequest.index()); + assert (templates != null); + // order of templates are highest order first, we have to iterate through them all though + String defaultPipeline = null; + for (IndexTemplateMetaData template : templates) { + final Settings settings = template.settings(); + if (requiredPipeline == null && IndexSettings.REQUIRED_PIPELINE.exists(settings)) { + requiredPipeline = IndexSettings.REQUIRED_PIPELINE.get(settings); + requestCanOverridePipeline = false; + // we can not break in case a lower-order template has a default pipeline that we need to reject + } else if (defaultPipeline == null && IndexSettings.DEFAULT_PIPELINE.exists(settings)) { + 
defaultPipeline = IndexSettings.DEFAULT_PIPELINE.get(settings); + // we can not break in case a lower-order template has a required pipeline that we need to reject + } + } + if (requiredPipeline != null && defaultPipeline != null) { + // we can not have picked up a required and a default pipeline from applying templates + final String message = String.format( + Locale.ROOT, + "required pipeline [%s] and default pipeline [%s] can not both be set", + requiredPipeline, + defaultPipeline); + throw new IllegalArgumentException(message); + } + final String pipeline; + if (requiredPipeline != null) { + pipeline = requiredPipeline; + } else { + pipeline = Objects.requireNonNullElse(defaultPipeline, IngestService.NOOP_PIPELINE_NAME); + } + indexRequest.setPipeline(pipeline); + } + + if (requestPipeline != null) { + if (requestCanOverridePipeline == false) { + final String message = String.format( + Locale.ROOT, + "request pipeline [%s] can not override required pipeline [%s]", + requestPipeline, + requiredPipeline); + throw new IllegalArgumentException(message); + } else { + indexRequest.setPipeline(requestPipeline); + } + } + + /* + * We have to track whether or not the pipeline for this request has already been resolved. It can happen that the + * pipeline for this request has already been derived yet we execute this loop again. That occurs if the bulk request + * has been forwarded by a non-ingest coordinating node to an ingest node. In this case, the coordinating node will have + * already resolved the pipeline for this request. It is important that we are able to distinguish this situation as we + * can not double-resolve the pipeline because we will not be able to distinguish the case of the pipeline having been + * set from a request pipeline parameter versus having been set by the resolution. We need to be able to distinguish + * these cases as we need to reject the request if the pipeline was set by a required pipeline and there is a request + * pipeline parameter too. + */ + indexRequest.isPipelineResolved(true); + } + + // Return whether this index request has a pipeline + return IngestService.NOOP_PIPELINE_NAME.equals(indexRequest.getPipeline()) == false; + } + boolean needToCheck() { return autoCreateIndex.needToCheck(); } diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java index bbb9d24469c9..95e797c7a6a7 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainRequest.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.explain; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.single.shard.SingleShardRequest; @@ -44,7 +45,6 @@ public class ExplainRequest extends SingleShardRequest implement private static final ParseField QUERY_FIELD = new ParseField("query"); - private String type = MapperService.SINGLE_MAPPING_NAME; private String id; private String routing; private String preference; @@ -59,16 +59,6 @@ public class ExplainRequest extends SingleShardRequest implement public ExplainRequest() { } - /** - * @deprecated Types are in the process of being removed. 
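A minimal, hypothetical sketch of the precedence rules that the new resolveRequiredOrDefaultPipeline method implements above — plain strings stand in for IndexRequest, Settings and template lookups; a required pipeline always wins and rejects any request pipeline, otherwise a request pipeline overrides a default:

    // Hypothetical sketch, not the real API: only the "_none" sentinel matches
    // the actual value of IngestService.NOOP_PIPELINE_NAME.
    final class PipelinePrecedenceSketch {
        static final String NOOP_PIPELINE_NAME = "_none";

        static String resolve(String requestPipeline, String requiredPipeline, String defaultPipeline) {
            if (requiredPipeline != null) {
                // a required pipeline (from index settings or templates) cannot be overridden
                if (requestPipeline != null) {
                    throw new IllegalArgumentException("request pipeline [" + requestPipeline
                        + "] can not override required pipeline [" + requiredPipeline + "]");
                }
                return requiredPipeline;
            }
            if (requestPipeline != null) {
                return requestPipeline; // a request pipeline may override a default pipeline
            }
            return defaultPipeline != null ? defaultPipeline : NOOP_PIPELINE_NAME;
        }

        public static void main(String[] args) {
            System.out.println(resolve(null, null, "logs-default"));   // logs-default
            System.out.println(resolve("mine", null, "logs-default")); // mine
            System.out.println(resolve(null, "audit", null));          // audit
            // resolve("mine", "audit", null) throws IllegalArgumentException
        }
    }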
Use {@link ExplainRequest(String, String) instead.} - */ - @Deprecated - public ExplainRequest(String index, String type, String id) { - this.index = index; - this.type = type; - this.id = id; - } - public ExplainRequest(String index, String id) { this.index = index; this.id = id; @@ -76,7 +66,10 @@ public class ExplainRequest extends SingleShardRequest implement ExplainRequest(StreamInput in) throws IOException { super(in); - type = in.readString(); + if (in.getVersion().before(Version.V_8_0_0)) { + String type = in.readString(); + assert MapperService.SINGLE_MAPPING_NAME.equals(type); + } id = in.readString(); routing = in.readOptionalString(); preference = in.readOptionalString(); @@ -87,23 +80,6 @@ public class ExplainRequest extends SingleShardRequest implement nowInMillis = in.readVLong(); } - /** - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public String type() { - return type; - } - - /** - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public ExplainRequest type(String type) { - this.type = type; - return this; - } - public String id() { return id; } @@ -185,9 +161,6 @@ public class ExplainRequest extends SingleShardRequest implement @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validateNonNullIndex(); - if (Strings.isEmpty(type)) { - validationException = addValidationError("type is missing", validationException); - } if (Strings.isEmpty(id)) { validationException = addValidationError("id is missing", validationException); } @@ -200,7 +173,9 @@ public class ExplainRequest extends SingleShardRequest implement @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(type); + if (out.getVersion().before(Version.V_8_0_0)) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeOptionalString(routing); out.writeOptionalString(preference); diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java index d2d9bb3b820a..a2ddc5762e07 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java @@ -35,16 +35,8 @@ public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder PARSER = new ConstructingObjectParser<>("explain", true, - (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], (String) arg[2], exists, (Explanation) arg[3], - (GetResult) arg[4])); + (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], exists, (Explanation) arg[2], + (GetResult) arg[3])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), _INDEX); - PARSER.declareString(ConstructingObjectParser.constructorArg(), _TYPE); PARSER.declareString(ConstructingObjectParser.constructorArg(), _ID); final ConstructingObjectParser explanationParser = new ConstructingObjectParser<>("explanation", true, arg -> { @@ -187,7 +177,6 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(_INDEX.getPreferredName(), index); - builder.field(_TYPE.getPreferredName(), type); builder.field(_ID.getPreferredName(), id); 
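The stream read/write changes in ExplainRequest above follow the usual wire-compatibility pattern: the legacy type field is still exchanged with pre-8.0 nodes, but its value is now fixed. A self-contained sketch under assumed names — DataInput/DataOutput replace StreamInput/StreamOutput, and a boolean replaces the Version comparison:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    final class VersionGatedWireSketch {
        static final String SINGLE_MAPPING_NAME = "_doc";

        static String readId(DataInput in, boolean peerBeforeV8) throws IOException {
            if (peerBeforeV8) {
                String type = in.readUTF(); // legacy field: consume it but ignore the value
                assert SINGLE_MAPPING_NAME.equals(type);
            }
            return in.readUTF();
        }

        static void writeId(DataOutput out, boolean peerBeforeV8, String id) throws IOException {
            if (peerBeforeV8) {
                out.writeUTF(SINGLE_MAPPING_NAME); // legacy field: older peers still read it
            }
            out.writeUTF(id);
        }

        public static void main(String[] args) throws IOException {
            java.io.ByteArrayOutputStream bytes = new java.io.ByteArrayOutputStream();
            writeId(new java.io.DataOutputStream(bytes), true, "doc-1");
            DataInput in = new java.io.DataInputStream(new java.io.ByteArrayInputStream(bytes.toByteArray()));
            System.out.println(readId(in, true)); // doc-1: both sides stay in step on the wire
        }
    }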
builder.field(MATCHED.getPreferredName(), isMatch()); if (hasExplanation()) { @@ -229,7 +218,6 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO } ExplainResponse other = (ExplainResponse) obj; return index.equals(other.index) - && type.equals(other.type) && id.equals(other.id) && Objects.equals(explanation, other.explanation) && getResult.isExists() == other.getResult.isExists() @@ -239,6 +227,6 @@ public class ExplainResponse extends ActionResponse implements StatusToXContentO @Override public int hashCode() { - return Objects.hash(index, type, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields()); + return Objects.hash(index, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields()); } } diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 70b9704c77d7..e5e73599146c 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -43,7 +43,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.rescore.Rescorer; import org.elasticsearch.tasks.Task; @@ -108,8 +108,7 @@ public class TransportExplainAction extends TransportSingleShardAction exten } @Override - public final ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt) { + public final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt) { AliasFilter filter = aliasFilter.get(shardIt.shardId().getIndex().getUUID()); assert filter != null; float indexBoost = concreteIndexBoosts.getOrDefault(shardIt.shardId().getIndex().getUUID(), DEFAULT_INDEX_BOOST); String indexName = shardIt.shardId().getIndex().getName(); final String[] routings = indexRoutings.getOrDefault(indexName, Collections.emptySet()) .toArray(new String[0]); - return new ShardSearchTransportRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), getNumShards(), + return new ShardSearchRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), getNumShards(), filter, indexBoost, timeProvider.getAbsoluteStartMillis(), shardIt.getClusterAlias(), routings); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index 28838defa3ec..994ef5553bbc 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import java.util.concurrent.Executor; @@ -109,7 +109,7 @@ interface SearchPhaseContext 
extends Executor { /** * Builds an request for the initial search phase. */ - ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt); + ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt); /** * Processes the phase transition from on phase to another. This method handles all errors that happen during the initial run execution diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 37c8fe4fcbd8..4b66ed885db2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -41,7 +41,6 @@ import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalScrollSearchRequest; import org.elasticsearch.search.internal.ShardSearchRequest; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; @@ -109,7 +108,7 @@ public class SearchTransportService { TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new)); } - public void sendCanMatch(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, final + public void sendCanMatch(Transport.Connection connection, final ShardSearchRequest request, SearchTask task, final ActionListener listener) { transportService.sendChildRequest(connection, QUERY_CAN_MATCH_NAME, request, task, TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchService.CanMatchResponse::new)); @@ -120,13 +119,13 @@ public class SearchTransportService { TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, (in) -> TransportResponse.Empty.INSTANCE)); } - public void sendExecuteDfs(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, + public void sendExecuteDfs(Transport.Connection connection, final ShardSearchRequest request, SearchTask task, final SearchActionListener listener) { transportService.sendChildRequest(connection, DFS_ACTION_NAME, request, task, new ConnectionCountingHandler<>(listener, DfsSearchResult::new, clientConnections, connection.getNode().getId())); } - public void sendExecuteQuery(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, + public void sendExecuteQuery(Transport.Connection connection, final ShardSearchRequest request, SearchTask task, final SearchActionListener listener) { // we optimize this and expect a QueryFetchSearchResult if we only have a single shard in the search request // this used to be the QUERY_AND_FETCH which doesn't exist anymore. 
@@ -306,7 +305,7 @@ public class SearchTransportService { TransportActionProxy.registerProxyAction(transportService, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, (in) -> TransportResponse.Empty.INSTANCE); - transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, + transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new, (request, channel, task) -> { searchService.executeDfsPhase(request, (SearchTask) task, new ActionListener() { @Override @@ -330,7 +329,7 @@ public class SearchTransportService { }); TransportActionProxy.registerProxyAction(transportService, DFS_ACTION_NAME, DfsSearchResult::new); - transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, + transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new, (request, channel, task) -> { searchService.executeQueryPhase(request, (SearchTask) task, new ChannelActionListener<>( channel, QUERY_ACTION_NAME, request)); @@ -374,7 +373,7 @@ public class SearchTransportService { TransportActionProxy.registerProxyAction(transportService, FETCH_ID_ACTION_NAME, FetchSearchResult::new); // this is cheap, it does not fetch during the rewrite phase, so we can let it quickly execute on a networking thread - transportService.registerRequestHandler(QUERY_CAN_MATCH_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, + transportService.registerRequestHandler(QUERY_CAN_MATCH_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new, (request, channel, task) -> { searchService.canMatch(request, new ChannelActionListener<>(channel, QUERY_CAN_MATCH_NAME, request)); }); diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index 0644916d8f56..25e7a25335d6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -154,47 +154,6 @@ public class IndicesOptions implements ToXContentFragment { return new IndicesOptions(opts, wildcards); } - /** - * See: {@link #fromByte(byte)} - */ - private static byte toByte(IndicesOptions options) { - byte id = 0; - if (options.ignoreUnavailable()) { - id |= 1; - } - if (options.allowNoIndices()) { - id |= 2; - } - if (options.expandWildcardsOpen()) { - id |= 4; - } - if (options.expandWildcardsClosed()) { - id |= 8; - } - // true is default here, for bw comp we keep the first 16 values - // in the array same as before + the default value for the new flag - if (options.allowAliasesToMultipleIndices() == false) { - id |= 16; - } - if (options.forbidClosedIndices()) { - id |= 32; - } - if (options.ignoreAliases()) { - id |= 64; - } - return id; - } - - private static final IndicesOptions[] OLD_VALUES; - - static { - short max = 1 << 7; - OLD_VALUES = new IndicesOptions[max]; - for (short id = 0; id < max; id++) { - OLD_VALUES[id] = IndicesOptions.fromByte((byte)id); - } - } - /** * @return Whether specified concrete indices should be ignored when unavailable (missing or closed) */ diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java index f1c6c36dc5cc..8337500f73aa 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Spawner.java @@ -72,9 +72,13 
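The removed toByte method in IndicesOptions packed seven independent flags into one byte, which is why the removed OLD_VALUES table could pre-compute all 1 << 7 = 128 combinations. A hypothetical sketch of that encoding, recorded here only to show how the now-dead code worked:

    final class FlagByteSketch {
        static byte toByte(boolean ignoreUnavailable, boolean allowNoIndices, boolean expandOpen,
                           boolean expandClosed, boolean forbidAliasesToMultipleIndices,
                           boolean forbidClosedIndices, boolean ignoreAliases) {
            byte id = 0;
            if (ignoreUnavailable)              id |= 1;
            if (allowNoIndices)                 id |= 2;
            if (expandOpen)                     id |= 4;
            if (expandClosed)                   id |= 8;
            // note the inversion: the bit records the *non-default* (false) setting
            if (forbidAliasesToMultipleIndices) id |= 16;
            if (forbidClosedIndices)            id |= 32;
            if (ignoreAliases)                  id |= 64;
            return id;
        }

        public static void main(String[] args) {
            byte id = toByte(true, true, true, false, false, false, false);
            System.out.println(id);            // 7: lenient options plus open-wildcard expansion
            System.out.println((id & 4) != 0); // true: the expand-open bit is set
        }
    }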
@@ final class Spawner implements Closeable { List paths = PluginsService.findPluginDirs(environment.modulesFile()); for (final Path modules : paths) { final PluginInfo info = PluginInfo.readFromProperties(modules); - final Path spawnPath = Platforms.nativeControllerPath(modules); + Path spawnPath = Platforms.nativeControllerPath(modules); if (!Files.isRegularFile(spawnPath)) { - continue; + // TODO: remove before release and just continue if the controller is not in the standard place + spawnPath = Platforms.fallbackNativeControllerPath(modules); + if (spawnPath == null || Files.isRegularFile(spawnPath) == false) { + continue; + } } if (!info.hasNativeController()) { final String message = String.format( diff --git a/server/src/main/java/org/elasticsearch/client/Client.java b/server/src/main/java/org/elasticsearch/client/Client.java index b2f01d92e6c7..7e7cbf1c18be 100644 --- a/server/src/main/java/org/elasticsearch/client/Client.java +++ b/server/src/main/java/org/elasticsearch/client/Client.java @@ -386,10 +386,9 @@ public interface Client extends ElasticsearchClient, Releasable { * Computes a score explanation for the specified request. * * @param index The index this explain is targeted for - * @param type The type this explain is targeted for * @param id The document identifier this explain is targeted for */ - ExplainRequestBuilder prepareExplain(String index, String type, String id); + ExplainRequestBuilder prepareExplain(String index, String id); /** * Computes a score explanation for the specified request. diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 1c9b6e593f8e..1180298f386e 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -585,8 +585,8 @@ public abstract class AbstractClient implements Client { } @Override - public ExplainRequestBuilder prepareExplain(String index, String type, String id) { - return new ExplainRequestBuilder(this, ExplainAction.INSTANCE, index, type, id); + public ExplainRequestBuilder prepareExplain(String index, String id) { + return new ExplainRequestBuilder(this, ExplainAction.INSTANCE, index, id); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateApplier.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateApplier.java index c339a8ed97e7..ad983f43bf2c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateApplier.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateApplier.java @@ -28,7 +28,11 @@ import org.elasticsearch.cluster.service.ClusterService; public interface ClusterStateApplier { /** - * Called when a new cluster state ({@link ClusterChangedEvent#state()} needs to be applied + * Called when a new cluster state ({@link ClusterChangedEvent#state()} needs to be applied. The cluster state to be applied is already + * committed when this method is called, so an applier must therefore be prepared to deal with any state it receives without throwing + * an exception. Throwing an exception from an applier is very bad because it will stop the application of this state before it has + * reached all the other appliers, and will likely result in another attempt to apply the same (or very similar) cluster state which + * might continue until this node is removed from the cluster. 
*/ void applyClusterState(ClusterChangedEvent event); } diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 3ce8615c4763..8a112950204c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotsService; import java.io.IOException; import java.util.ArrayList; @@ -249,20 +250,26 @@ public class SnapshotsInProgress extends AbstractNamedDiffable implement public static class ShardSnapshotStatus { private final ShardState state; private final String nodeId; + + @Nullable + private final String generation; + + @Nullable private final String reason; - public ShardSnapshotStatus(String nodeId) { - this(nodeId, ShardState.INIT); + public ShardSnapshotStatus(String nodeId, String generation) { + this(nodeId, ShardState.INIT, generation); } - public ShardSnapshotStatus(String nodeId, ShardState state) { - this(nodeId, state, null); + public ShardSnapshotStatus(String nodeId, ShardState state, String generation) { + this(nodeId, state, null, generation); } - public ShardSnapshotStatus(String nodeId, ShardState state, String reason) { + public ShardSnapshotStatus(String nodeId, ShardState state, String reason, String generation) { this.nodeId = nodeId; this.state = state; this.reason = reason; + this.generation = generation; // If the state is failed we have to have a reason for this failure assert state.failed() == false || reason != null; } @@ -270,6 +277,12 @@ public class SnapshotsInProgress extends AbstractNamedDiffable implement public ShardSnapshotStatus(StreamInput in) throws IOException { nodeId = in.readOptionalString(); state = ShardState.fromValue(in.readByte()); + if (in.getVersion().onOrAfter(SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION)) { + generation = in.readOptionalString(); + assert generation != null || state != ShardState.SUCCESS : "Received null generation for shard state [" + state + "]"; + } else { + generation = null; + } reason = in.readOptionalString(); } @@ -281,6 +294,10 @@ public class SnapshotsInProgress extends AbstractNamedDiffable implement return nodeId; } + public String generation() { + return this.generation; + } + public String reason() { return reason; } @@ -288,6 +305,9 @@ public class SnapshotsInProgress extends AbstractNamedDiffable implement public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(nodeId); out.writeByte(state.value); + if (out.getVersion().onOrAfter(SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION)) { + out.writeOptionalString(generation); + } out.writeOptionalString(reason); } @@ -296,8 +316,8 @@ public class SnapshotsInProgress extends AbstractNamedDiffable implement if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ShardSnapshotStatus status = (ShardSnapshotStatus) o; - return Objects.equals(nodeId, status.nodeId) && Objects.equals(reason, status.reason) && state == status.state; - + return Objects.equals(nodeId, status.nodeId) && Objects.equals(reason, status.reason) + && Objects.equals(generation, status.generation) && state == status.state; } @Override @@ -305,12 +325,13 @@ public class 
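A hedged illustration of the contract the expanded ClusterStateApplier javadoc spells out — a hypothetical wrapper, not part of this change, showing why an applier should contain its own failures rather than throw: the state is already committed, so an escaping exception only prevents later appliers from seeing it.

    final class SafeApplierSketch {
        interface Applier { void applyClusterState(String stateDescription); }

        static Applier dontThrow(Applier inner) {
            return state -> {
                try {
                    inner.applyClusterState(state);
                } catch (Exception e) {
                    // never rethrow: the same state would just be re-applied and fail again
                    System.err.println("applier failed on [" + state + "]: " + e);
                }
            };
        }

        public static void main(String[] args) {
            dontThrow(state -> { throw new RuntimeException("boom"); }).applyClusterState("state-42");
            System.out.println("remaining appliers still run");
        }
    }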
SnapshotsInProgress extends AbstractNamedDiffable implement int result = state != null ? state.hashCode() : 0; result = 31 * result + (nodeId != null ? nodeId.hashCode() : 0); result = 31 * result + (reason != null ? reason.hashCode() : 0); + result = 31 * result + (generation != null ? generation.hashCode() : 0); return result; } @Override public String toString() { - return "ShardSnapshotStatus[state=" + state + ", nodeId=" + nodeId + ", reason=" + reason + "]"; + return "ShardSnapshotStatus[state=" + state + ", nodeId=" + nodeId + ", reason=" + reason + ", generation=" + generation + "]"; } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java index 38ac7a32bdba..390b7a4cbde3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/FollowersChecker.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportException; @@ -113,7 +114,7 @@ public class FollowersChecker { (request, transportChannel, task) -> handleFollowerCheck(request, transportChannel)); transportService.addConnectionListener(new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { handleDisconnectedNode(node); } }); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java b/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java index 703c08bf260c..93029d2d5339 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/LeaderChecker.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.NodeDisconnectedException; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; @@ -62,7 +63,7 @@ public class LeaderChecker { private static final Logger logger = LogManager.getLogger(LeaderChecker.class); - public static final String LEADER_CHECK_ACTION_NAME = "internal:coordination/fault_detection/leader_check"; + static final String LEADER_CHECK_ACTION_NAME = "internal:coordination/fault_detection/leader_check"; // the time between checks sent to the leader public static final Setting LEADER_CHECK_INTERVAL_SETTING = @@ -88,7 +89,7 @@ public class LeaderChecker { private volatile DiscoveryNodes discoveryNodes; - public LeaderChecker(final Settings settings, final TransportService transportService, final Consumer onLeaderFailure) { + LeaderChecker(final Settings settings, final TransportService transportService, final Consumer onLeaderFailure) { leaderCheckInterval = 
LEADER_CHECK_INTERVAL_SETTING.get(settings); leaderCheckTimeout = LEADER_CHECK_TIMEOUT_SETTING.get(settings); leaderCheckRetryCount = LEADER_CHECK_RETRY_COUNT_SETTING.get(settings); @@ -103,7 +104,7 @@ public class LeaderChecker { transportService.addConnectionListener(new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { handleDisconnectedNode(node); } }); @@ -119,7 +120,7 @@ public class LeaderChecker { * * @param leader the node to be checked as leader, or null if checks should be disabled */ - public void updateLeader(@Nullable final DiscoveryNode leader) { + void updateLeader(@Nullable final DiscoveryNode leader) { assert transportService.getLocalNode().equals(leader) == false; final CheckScheduler checkScheduler; if (leader != null) { @@ -139,12 +140,8 @@ public class LeaderChecker { /** * Update the "known" discovery nodes. Should be called on the leader before a new cluster state is published to reflect the new * publication targets, and also called if a leader becomes a non-leader. - * TODO if heartbeats can make nodes become followers then this needs to be called before a heartbeat is sent to a new node too. - *
- *
- * isLocalNodeElectedMaster() should reflect whether this node is a leader, and nodeExists() - * should indicate whether nodes are known publication targets or not. */ - public void setCurrentNodes(DiscoveryNodes discoveryNodes) { + void setCurrentNodes(DiscoveryNodes discoveryNodes) { logger.trace("setCurrentNodes: {}", discoveryNodes); this.discoveryNodes = discoveryNodes; } @@ -159,11 +156,13 @@ public class LeaderChecker { assert discoveryNodes != null; if (discoveryNodes.isLocalNodeElectedMaster() == false) { - logger.debug("non-master handling {}", request); - throw new CoordinationStateRejectedException("non-leader rejecting leader check"); + logger.debug("rejecting leader check on non-master {}", request); + throw new CoordinationStateRejectedException( + "rejecting leader check from [" + request.getSender() + "] sent to a node that is no longer the master"); } else if (discoveryNodes.nodeExists(request.getSender()) == false) { - logger.debug("leader check from unknown node: {}", request); - throw new CoordinationStateRejectedException("leader check from unknown node"); + logger.debug("rejecting leader check from removed node: {}", request); + throw new CoordinationStateRejectedException( + "rejecting leader check since [" + request.getSender() + "] has been removed from the cluster"); } else { logger.trace("handling {}", request); } @@ -303,15 +302,15 @@ public class LeaderChecker { } } - public static class LeaderCheckRequest extends TransportRequest { + static class LeaderCheckRequest extends TransportRequest { private final DiscoveryNode sender; - public LeaderCheckRequest(final DiscoveryNode sender) { + LeaderCheckRequest(final DiscoveryNode sender) { this.sender = sender; } - public LeaderCheckRequest(final StreamInput in) throws IOException { + LeaderCheckRequest(final StreamInput in) throws IOException { super(in); sender = new DiscoveryNode(in); } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 0decde35f0a6..574919f6751a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -390,7 +390,7 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements return true; } - protected void runTask(UpdateTask task) { + private void runTask(UpdateTask task) { if (!lifecycle.started()) { logger.debug("processing [{}]: ignoring, cluster applier service not started", task.source); return; @@ -447,6 +447,9 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]", executionTime, newClusterState.version(), newClusterState.stateUUID(), task.source), e); } + // failing to apply a cluster state with an exception indicates a bug in validation or in one of the appliers; if we + // continue we will retry with the same cluster state but that might not help. 
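The reworded rejections in handleLeaderCheck above distinguish two distinct failure modes. A hypothetical distillation of the validation order, with IllegalStateException standing in for CoordinationStateRejectedException:

    import java.util.Set;

    final class LeaderCheckSketch {
        static void handleLeaderCheck(boolean localNodeElectedMaster, Set<String> clusterNodes, String sender) {
            if (localNodeElectedMaster == false) {
                throw new IllegalStateException(
                    "rejecting leader check from [" + sender + "] sent to a node that is no longer the master");
            } else if (clusterNodes.contains(sender) == false) {
                throw new IllegalStateException(
                    "rejecting leader check since [" + sender + "] has been removed from the cluster");
            }
            // otherwise: this node is a healthy leader and the sender is a known follower
        }

        public static void main(String[] args) {
            handleLeaderCheck(true, Set.of("node-1", "node-2"), "node-2"); // accepted
            // handleLeaderCheck(false, Set.of("node-1"), "node-1") throws: no longer master
            // handleLeaderCheck(true, Set.of("node-1"), "node-9") throws: removed from cluster
        }
    }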
+ assert applicationMayFail(); task.listener.onFailure(task.source, e); } } @@ -661,4 +664,8 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements return threadPool.relativeTimeInMillis(); } + // overridden by tests that need to check behaviour in the event of an application failure + protected boolean applicationMayFail() { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/common/bytes/ByteBufferReference.java b/server/src/main/java/org/elasticsearch/common/bytes/ByteBufferReference.java index a36a19edc7e2..d696f060802f 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/ByteBufferReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/ByteBufferReference.java @@ -20,9 +20,9 @@ package org.elasticsearch.common.bytes; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.FutureObjects; import java.nio.ByteBuffer; +import java.util.Objects; /** * This is a {@link BytesReference} backed by a {@link ByteBuffer}. The byte buffer can either be a heap or @@ -58,7 +58,7 @@ public class ByteBufferReference extends BytesReference { @Override public BytesReference slice(int from, int length) { - FutureObjects.checkFromIndexSize(from, length, this.length); + Objects.checkFromIndexSize(from, length, this.length); buffer.position(from); buffer.limit(from + length); ByteBufferReference newByteBuffer = new ByteBufferReference(buffer); diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java index de21acc487df..9761ad0c42c6 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesArray.java @@ -21,6 +21,8 @@ package org.elasticsearch.common.bytes; import org.apache.lucene.util.BytesRef; +import java.util.Objects; + public final class BytesArray extends BytesReference { public static final BytesArray EMPTY = new BytesArray(BytesRef.EMPTY_BYTES, 0, 0); @@ -67,10 +69,7 @@ public final class BytesArray extends BytesReference { @Override public BytesReference slice(int from, int length) { - if (from < 0 || (from + length) > this.length) { - throw new IllegalArgumentException("can't slice a buffer with length [" + this.length + - "], with slice parameters from [" + from + "], length [" + length + "]"); - } + Objects.checkFromIndexSize(from, length, this.length); return new BytesArray(bytes, offset + from, length); } diff --git a/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java index 10bc959db337..4845102b89bc 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.bytes; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefIterator; -import org.apache.lucene.util.FutureObjects; import org.apache.lucene.util.RamUsageEstimator; import java.io.IOException; @@ -78,7 +77,7 @@ public final class CompositeBytesReference extends BytesReference { @Override public BytesReference slice(int from, int length) { - FutureObjects.checkFromIndexSize(from, length, this.length); + Objects.checkFromIndexSize(from, length, this.length); if (length == 0) { return BytesArray.EMPTY; diff --git 
a/server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java index f15b3b9cf329..9f2619cd1aa7 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/PagedBytesReference.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.PageCacheRecycler; import java.io.IOException; +import java.util.Objects; /** * A page based bytes reference, internally holding the bytes in a paged @@ -60,10 +61,7 @@ public class PagedBytesReference extends BytesReference { @Override public BytesReference slice(int from, int length) { - if (from < 0 || (from + length) > length()) { - throw new IllegalArgumentException("can't slice a buffer with length [" + length() + - "], with slice parameters from [" + from + "], length [" + length + "]"); - } + Objects.checkFromIndexSize(from, length, this.length); return new PagedBytesReference(byteArray, offset + from, length); } diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 998e4e123a97..6d6a39d1e3f7 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -193,7 +193,6 @@ public abstract class AbstractScopedSettings { } catch (Exception ex) { logger.warn("failed to apply settings", ex); throw ex; - } finally { } return lastSettingsApplied = newSettings; } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 92332225ed5c..a6f76a486a7f 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -71,6 +71,7 @@ import org.elasticsearch.discovery.SettingsBasedSeedHostsProvider; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.gateway.IncrementalClusterStateWriter; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -226,6 +227,7 @@ public final class ClusterSettings extends AbstractScopedSettings { GatewayService.RECOVER_AFTER_MASTER_NODES_SETTING, GatewayService.RECOVER_AFTER_NODES_SETTING, GatewayService.RECOVER_AFTER_TIME_SETTING, + IncrementalClusterStateWriter.SLOW_WRITE_LOGGING_THRESHOLD, NetworkModule.HTTP_DEFAULT_TYPE_SETTING, NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING, NetworkModule.HTTP_TYPE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java index 9a47de43fa6d..67088eac91f8 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ListenableFuture.java @@ -43,12 +43,25 @@ public final class ListenableFuture extends BaseFuture implements ActionLi private volatile boolean done = false; private final List, ExecutorService>> listeners = new ArrayList<>(); + /** * Adds a listener to this 
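Several slice implementations above replace a hand-rolled range check with Objects.checkFromIndexSize (available since JDK 9), which validates that [from, from + length) lies within the backing capacity and throws IndexOutOfBoundsException otherwise. A small sketch of the equivalent behaviour over a plain byte array:

    import java.util.Objects;

    final class SliceBoundsSketch {
        static byte[] slice(byte[] bytes, int from, int length) {
            // throws IndexOutOfBoundsException with a standard message on bad ranges,
            // replacing the bespoke IllegalArgumentException the old code built by hand
            Objects.checkFromIndexSize(from, length, bytes.length);
            byte[] out = new byte[length];
            System.arraycopy(bytes, from, out, 0, length);
            return out;
        }

        public static void main(String[] args) {
            System.out.println(slice(new byte[]{1, 2, 3, 4}, 1, 2).length); // 2
            // slice(new byte[]{1, 2, 3, 4}, 3, 2) throws IndexOutOfBoundsException
        }
    }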
future. If the future has not yet completed, the listener will be * notified of a response or exception in a runnable submitted to the ExecutorService provided. * If the future has completed, the listener will be notified immediately without forking to * a different thread. */ + public void addListener(ActionListener listener, ExecutorService executor) { + addListener(listener, executor, null); + } + + /** + * Adds a listener to this future. If the future has not yet completed, the listener will be + * notified of a response or exception in a runnable submitted to the ExecutorService provided. + * If the future has completed, the listener will be notified immediately without forking to + * a different thread. + * + * It will apply the provided ThreadContext (if not null) when executing the listening. + */ public void addListener(ActionListener listener, ExecutorService executor, ThreadContext threadContext) { if (done) { // run the callback directly, we don't hold the lock and don't need to fork! diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 9abba46ad17e..644a1eae644a 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -29,6 +29,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateApplier; import org.elasticsearch.cluster.coordination.CoordinationState.PersistedState; import org.elasticsearch.cluster.coordination.InMemoryPersistedState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -36,8 +37,6 @@ import org.elasticsearch.cluster.metadata.Manifest; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; @@ -48,63 +47,55 @@ import org.elasticsearch.plugins.MetaDataUpgrader; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; -import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.UnaryOperator; /** - * This class is responsible for storing/retrieving metadata to/from disk. - * When instance of this class is created, constructor ensures that this version is compatible with state stored on disk and performs - * state upgrade if necessary. Also it checks that atomic move is supported on the filesystem level, because it's a must for metadata - * store algorithm. - * Please note that the state being loaded when constructing the instance of this class is NOT the state that will be used as a - * {@link ClusterState#metaData()}. Instead when node is starting up, it calls {@link #getMetaData()} method and if this node is - * elected as master, it requests metaData from other master eligible nodes. 
After that, master node performs re-conciliation on the - * gathered results, re-creates {@link ClusterState} and broadcasts this state to other nodes in the cluster. + * Loads (and maybe upgrades) cluster metadata at startup, and persistently stores cluster metadata for future restarts. + * + * When started, ensures that this version is compatible with the state stored on disk, and performs a state upgrade if necessary. Note that + * the state being loaded when constructing the instance of this class is not necessarily the state that will be used as {@link + * ClusterState#metaData()} because it might be stale or incomplete. Master-eligible nodes must perform an election to find a complete and + * non-stale state, and master-ineligible nodes receive the real cluster state from the elected master after joining the cluster. */ -public class GatewayMetaState implements PersistedState { - protected static final Logger logger = LogManager.getLogger(GatewayMetaState.class); +public class GatewayMetaState { + private static final Logger logger = LogManager.getLogger(GatewayMetaState.class); - private final MetaStateService metaStateService; - private final Settings settings; - - // On master-eligible Zen2 nodes, we use this very object for the PersistedState (so that the state is actually persisted); on other - // nodes we use an InMemoryPersistedState instead and persist using a cluster applier if needed. In all cases it's an error to try and - // use this object as a PersistedState before calling start(). TODO stop implementing PersistedState at the top level. + // Set by calling start() private final SetOnce persistedState = new SetOnce<>(); - // on master-eligible nodes we call updateClusterState under the Coordinator's mutex; on master-ineligible data nodes we call - // updateClusterState on the (unique) cluster applier thread; on other nodes we never call updateClusterState. In all cases there's no - // need to synchronize access to these variables. 
- protected Manifest previousManifest; - protected ClusterState previousClusterState; - protected boolean incrementalWrite; - - public GatewayMetaState(Settings settings, MetaStateService metaStateService) { - this.settings = settings; - this.metaStateService = metaStateService; + public PersistedState getPersistedState() { + final PersistedState persistedState = this.persistedState.get(); + assert persistedState != null : "not started"; + return persistedState; } - public void start(TransportService transportService, ClusterService clusterService, - MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) { - assert previousClusterState == null : "should only start once, but already have " + previousClusterState; + public MetaData getMetaData() { + return getPersistedState().getLastAcceptedState().metaData(); + } + + public void start(Settings settings, TransportService transportService, ClusterService clusterService, + MetaStateService metaStateService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, + MetaDataUpgrader metaDataUpgrader) { + assert persistedState.get() == null : "should only start once, but already have " + persistedState.get(); + + final Tuple manifestClusterStateTuple; try { - upgradeMetaData(metaDataIndexUpgradeService, metaDataUpgrader); - initializeClusterState(ClusterName.CLUSTER_NAME_SETTING.get(settings)); + upgradeMetaData(settings, metaStateService, metaDataIndexUpgradeService, metaDataUpgrader); + manifestClusterStateTuple = loadStateAndManifest(ClusterName.CLUSTER_NAME_SETTING.get(settings), metaStateService); } catch (IOException e) { throw new ElasticsearchException("failed to load metadata", e); } - incrementalWrite = false; - applyClusterStateUpdaters(transportService, clusterService); + final IncrementalClusterStateWriter incrementalClusterStateWriter + = new IncrementalClusterStateWriter(settings, clusterService.getClusterSettings(), metaStateService, + manifestClusterStateTuple.v1(), + prepareInitialClusterState(transportService, clusterService, manifestClusterStateTuple.v2()), + transportService.getThreadPool()::relativeTimeInMillis); if (DiscoveryNode.isMasterNode(settings) == false) { if (DiscoveryNode.isDataNode(settings)) { // Master-eligible nodes persist index metadata for all indices regardless of whether they hold any shards or not. It's @@ -121,43 +112,36 @@ public class GatewayMetaState implements PersistedState { // state on master-ineligible data nodes is mostly ignored - it's only there to support dangling index imports, which is // inherently unsafe anyway. Thus we can safely delay metadata writes on master-ineligible data nodes until applying the // cluster state, which is what this does: - clusterService.addLowPriorityApplier(this::applyClusterState); + clusterService.addLowPriorityApplier(new GatewayClusterApplier(incrementalClusterStateWriter)); } - persistedState.set(new InMemoryPersistedState(getCurrentTerm(), getLastAcceptedState())); + + // Master-ineligible nodes do not need to persist the cluster state when accepting it because they are not in the voting + // configuration, so it's ok if they have a stale or incomplete cluster state when restarted. We track the latest cluster state + // in memory instead. 
+ persistedState.set(new InMemoryPersistedState(manifestClusterStateTuple.v1().getCurrentTerm(), manifestClusterStateTuple.v2())); } else { - persistedState.set(this); + // Master-ineligible nodes must persist the cluster state when accepting it because they must reload the (complete, fresh) + // last-accepted cluster state when restarted. + persistedState.set(new GatewayPersistedState(incrementalClusterStateWriter)); } } - private void initializeClusterState(ClusterName clusterName) throws IOException { - long startNS = System.nanoTime(); - Tuple manifestAndMetaData = metaStateService.loadFullState(); - previousManifest = manifestAndMetaData.v1(); - - final MetaData metaData = manifestAndMetaData.v2(); - - previousClusterState = ClusterState.builder(clusterName) - .version(previousManifest.getClusterStateVersion()) - .metaData(metaData).build(); - - logger.debug("took {} to load state", TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - startNS))); - } - - protected void applyClusterStateUpdaters(TransportService transportService, ClusterService clusterService) { - assert previousClusterState.nodes().getLocalNode() == null : "applyClusterStateUpdaters must only be called once"; + // exposed so it can be overridden by tests + ClusterState prepareInitialClusterState(TransportService transportService, ClusterService clusterService, ClusterState clusterState) { + assert clusterState.nodes().getLocalNode() == null : "prepareInitialClusterState must only be called once"; assert transportService.getLocalNode() != null : "transport service is not yet started"; - - previousClusterState = Function.identity() + return Function.identity() .andThen(ClusterStateUpdaters::addStateNotRecoveredBlock) .andThen(state -> ClusterStateUpdaters.setLocalNode(state, transportService.getLocalNode())) .andThen(state -> ClusterStateUpdaters.upgradeAndArchiveUnknownOrInvalidSettings(state, clusterService.getClusterSettings())) .andThen(ClusterStateUpdaters::recoverClusterBlocks) - .apply(previousClusterState); + .apply(clusterState); } - protected void upgradeMetaData(MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) - throws IOException { - if (isMasterOrDataNode()) { + // exposed so it can be overridden by tests + void upgradeMetaData(Settings settings, MetaStateService metaStateService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, + MetaDataUpgrader metaDataUpgrader) throws IOException { + if (isMasterOrDataNode(settings)) { try { final Tuple metaStateAndData = metaStateService.loadFullState(); final Manifest manifest = metaStateAndData.v1(); @@ -170,7 +154,8 @@ public class GatewayMetaState implements PersistedState { // if there is manifest file, it means metadata is properly persisted to all data paths // if there is no manifest file (upgrade from 6.x to 7.x) metadata might be missing on some data paths, // but anyway we will re-write it as soon as we receive first ClusterState - final AtomicClusterStateWriter writer = new AtomicClusterStateWriter(metaStateService, manifest); + final IncrementalClusterStateWriter.AtomicClusterStateWriter writer + = new IncrementalClusterStateWriter.AtomicClusterStateWriter(metaStateService, manifest); final MetaData upgradedMetaData = upgradeMetaData(metaData, metaDataIndexUpgradeService, metaDataUpgrader); final long globalStateGeneration; @@ -198,233 +183,25 @@ public class GatewayMetaState implements PersistedState { } } - private boolean isMasterOrDataNode() { + private static Tuple 
loadStateAndManifest(ClusterName clusterName, + MetaStateService metaStateService) throws IOException { + final long startNS = System.nanoTime(); + final Tuple manifestAndMetaData = metaStateService.loadFullState(); + final Manifest manifest = manifestAndMetaData.v1(); + + final ClusterState clusterState = ClusterState.builder(clusterName) + .version(manifest.getClusterStateVersion()) + .metaData(manifestAndMetaData.v2()).build(); + + logger.debug("took {} to load state", TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - startNS))); + + return Tuple.tuple(manifest, clusterState); + } + + private static boolean isMasterOrDataNode(Settings settings) { return DiscoveryNode.isMasterNode(settings) || DiscoveryNode.isDataNode(settings); } - public PersistedState getPersistedState() { - final PersistedState persistedState = this.persistedState.get(); - assert persistedState != null : "not started"; - return persistedState; - } - - public MetaData getMetaData() { - return previousClusterState.metaData(); - } - - private void applyClusterState(ClusterChangedEvent event) { - assert isMasterOrDataNode(); - - if (event.state().blocks().disableStatePersistence()) { - incrementalWrite = false; - return; - } - - try { - // Hack: This is to ensure that non-master-eligible Zen2 nodes always store a current term - // that's higher than the last accepted term. - // TODO: can we get rid of this hack? - if (event.state().term() > getCurrentTerm()) { - innerSetCurrentTerm(event.state().term()); - } - - updateClusterState(event.state(), event.previousState()); - incrementalWrite = true; - } catch (WriteStateException e) { - logger.warn("Exception occurred when storing new meta data", e); - } - } - - @Override - public long getCurrentTerm() { - return previousManifest.getCurrentTerm(); - } - - @Override - public ClusterState getLastAcceptedState() { - assert previousClusterState.nodes().getLocalNode() != null : "Cluster state is not fully built yet"; - return previousClusterState; - } - - @Override - public void setCurrentTerm(long currentTerm) { - try { - innerSetCurrentTerm(currentTerm); - } catch (WriteStateException e) { - logger.error(new ParameterizedMessage("Failed to set current term to {}", currentTerm), e); - e.rethrowAsErrorOrUncheckedException(); - } - } - - private void innerSetCurrentTerm(long currentTerm) throws WriteStateException { - Manifest manifest = new Manifest(currentTerm, previousManifest.getClusterStateVersion(), previousManifest.getGlobalGeneration(), - new HashMap<>(previousManifest.getIndexGenerations())); - metaStateService.writeManifestAndCleanup("current term changed", manifest); - previousManifest = manifest; - } - - @Override - public void setLastAcceptedState(ClusterState clusterState) { - try { - incrementalWrite = previousClusterState.term() == clusterState.term(); - updateClusterState(clusterState, previousClusterState); - } catch (WriteStateException e) { - logger.error(new ParameterizedMessage("Failed to set last accepted state with version {}", clusterState.version()), e); - e.rethrowAsErrorOrUncheckedException(); - } - } - - /** - * This class is used to write changed global {@link MetaData}, {@link IndexMetaData} and {@link Manifest} to disk. - * This class delegates write* calls to corresponding write calls in {@link MetaStateService} and - * additionally it keeps track of cleanup actions to be performed if transaction succeeds or fails. 
- */ - static class AtomicClusterStateWriter { - private static final String FINISHED_MSG = "AtomicClusterStateWriter is finished"; - private final List commitCleanupActions; - private final List rollbackCleanupActions; - private final Manifest previousManifest; - private final MetaStateService metaStateService; - private boolean finished; - - AtomicClusterStateWriter(MetaStateService metaStateService, Manifest previousManifest) { - this.metaStateService = metaStateService; - assert previousManifest != null; - this.previousManifest = previousManifest; - this.commitCleanupActions = new ArrayList<>(); - this.rollbackCleanupActions = new ArrayList<>(); - this.finished = false; - } - - long writeGlobalState(String reason, MetaData metaData) throws WriteStateException { - assert finished == false : FINISHED_MSG; - try { - rollbackCleanupActions.add(() -> metaStateService.cleanupGlobalState(previousManifest.getGlobalGeneration())); - long generation = metaStateService.writeGlobalState(reason, metaData); - commitCleanupActions.add(() -> metaStateService.cleanupGlobalState(generation)); - return generation; - } catch (WriteStateException e) { - rollback(); - throw e; - } - } - - long writeIndex(String reason, IndexMetaData metaData) throws WriteStateException { - assert finished == false : FINISHED_MSG; - try { - Index index = metaData.getIndex(); - Long previousGeneration = previousManifest.getIndexGenerations().get(index); - if (previousGeneration != null) { - // we prefer not to clean-up index metadata in case of rollback, - // if it's not referenced by previous manifest file - // not to break dangling indices functionality - rollbackCleanupActions.add(() -> metaStateService.cleanupIndex(index, previousGeneration)); - } - long generation = metaStateService.writeIndex(reason, metaData); - commitCleanupActions.add(() -> metaStateService.cleanupIndex(index, generation)); - return generation; - } catch (WriteStateException e) { - rollback(); - throw e; - } - } - - void writeManifestAndCleanup(String reason, Manifest manifest) throws WriteStateException { - assert finished == false : FINISHED_MSG; - try { - metaStateService.writeManifestAndCleanup(reason, manifest); - commitCleanupActions.forEach(Runnable::run); - finished = true; - } catch (WriteStateException e) { - // if Manifest write results in dirty WriteStateException it's not safe to remove - // new metadata files, because if Manifest was actually written to disk and its deletion - // fails it will reference these new metadata files. - // In the future, we might decide to add more fine grained check to understand if after - // WriteStateException Manifest deletion has actually failed. - if (e.isDirty() == false) { - rollback(); - } - throw e; - } - } - - void rollback() { - rollbackCleanupActions.forEach(Runnable::run); - finished = true; - } - } - - /** - * Updates manifest and meta data on disk. - * - * @param newState new {@link ClusterState} - * @param previousState previous {@link ClusterState} - * - * @throws WriteStateException if exception occurs. See also {@link WriteStateException#isDirty()}. 
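The AtomicClusterStateWriter being moved here implements a simple two-phase cleanup protocol. A hypothetical sketch of that bookkeeping — keepOnly stands in for the MetaStateService cleanup calls, which (as used here) retain one generation and delete the rest:

    import java.util.ArrayList;
    import java.util.List;

    final class TwoPhaseCleanupSketch {
        private final List<Runnable> commitCleanupActions = new ArrayList<>();
        private final List<Runnable> rollbackCleanupActions = new ArrayList<>();

        long write(long previousGeneration) {
            // if the transaction later rolls back, keep the old generation
            rollbackCleanupActions.add(() -> keepOnly(previousGeneration));
            long newGeneration = previousGeneration + 1; // pretend a new state file was written
            // if the transaction commits, the new generation is the one to keep
            commitCleanupActions.add(() -> keepOnly(newGeneration));
            return newGeneration;
        }

        void commit()   { commitCleanupActions.forEach(Runnable::run); }
        void rollback() { rollbackCleanupActions.forEach(Runnable::run); }

        private static void keepOnly(long generation) {
            System.out.println("deleting every other generation, keeping " + generation);
        }

        public static void main(String[] args) {
            TwoPhaseCleanupSketch writer = new TwoPhaseCleanupSketch();
            writer.write(41);
            writer.commit(); // keeps generation 42; on failure, rollback() would keep 41
        }
    }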
- */ - private void updateClusterState(ClusterState newState, ClusterState previousState) - throws WriteStateException { - MetaData newMetaData = newState.metaData(); - - final AtomicClusterStateWriter writer = new AtomicClusterStateWriter(metaStateService, previousManifest); - long globalStateGeneration = writeGlobalState(writer, newMetaData); - Map indexGenerations = writeIndicesMetadata(writer, newState, previousState); - Manifest manifest = new Manifest(previousManifest.getCurrentTerm(), newState.version(), globalStateGeneration, indexGenerations); - writeManifest(writer, manifest); - - previousManifest = manifest; - previousClusterState = newState; - } - - private void writeManifest(AtomicClusterStateWriter writer, Manifest manifest) throws WriteStateException { - if (manifest.equals(previousManifest) == false) { - writer.writeManifestAndCleanup("changed", manifest); - } - } - - private Map writeIndicesMetadata(AtomicClusterStateWriter writer, ClusterState newState, ClusterState previousState) - throws WriteStateException { - Map previouslyWrittenIndices = previousManifest.getIndexGenerations(); - Set relevantIndices = getRelevantIndices(newState, previousState, previouslyWrittenIndices.keySet()); - - Map newIndices = new HashMap<>(); - - MetaData previousMetaData = incrementalWrite ? previousState.metaData() : null; - Iterable actions = resolveIndexMetaDataActions(previouslyWrittenIndices, relevantIndices, previousMetaData, - newState.metaData()); - - for (IndexMetaDataAction action : actions) { - long generation = action.execute(writer); - newIndices.put(action.getIndex(), generation); - } - - return newIndices; - } - - private long writeGlobalState(AtomicClusterStateWriter writer, MetaData newMetaData) - throws WriteStateException { - if (incrementalWrite == false || MetaData.isGlobalStateEquals(previousClusterState.metaData(), newMetaData) == false) { - return writer.writeGlobalState("changed", newMetaData); - } - return previousManifest.getGlobalGeneration(); - } - - public static Set getRelevantIndices(ClusterState state, ClusterState previousState, Set previouslyWrittenIndices) { - Set relevantIndices; - if (isDataOnlyNode(state)) { - relevantIndices = getRelevantIndicesOnDataOnlyNode(state, previousState, previouslyWrittenIndices); - } else if (state.nodes().getLocalNode().isMasterNode()) { - relevantIndices = getRelevantIndicesForMasterEligibleNode(state); - } else { - relevantIndices = Collections.emptySet(); - } - return relevantIndices; - } - - private static boolean isDataOnlyNode(ClusterState state) { - return state.nodes().getLocalNode().isMasterNode() == false && state.nodes().getLocalNode().isDataNode(); - } - /** * Elasticsearch 2.0 removed several deprecated features and as well as support for Lucene 3.x. This method calls * {@link MetaDataIndexUpgradeService} to makes sure that indices are compatible with the current version. The @@ -480,160 +257,81 @@ public class GatewayMetaState implements PersistedState { return false; } - /** - * Returns list of {@link IndexMetaDataAction} for each relevant index. - * For each relevant index there are 3 options: - *
-     * <ol>
-     * <li>
-     * {@link KeepPreviousGeneration} - index metadata is already stored to disk and index metadata version is not changed, no
-     * action is required.
-     * </li>
-     * <li>
-     * {@link WriteNewIndexMetaData} - there is no index metadata on disk and index metadata for this index should be written.
-     * </li>
-     * <li>
-     * {@link WriteChangedIndexMetaData} - index metadata is already on disk, but index metadata version has changed. Updated
-     * index metadata should be written to disk.
-     * </li>
-     * </ol>
- * - * @param previouslyWrittenIndices A list of indices for which the state was already written before - * @param relevantIndices The list of indices for which state should potentially be written - * @param previousMetaData The last meta data we know of - * @param newMetaData The new metadata - * @return list of {@link IndexMetaDataAction} for each relevant index. - */ - public static List resolveIndexMetaDataActions(Map previouslyWrittenIndices, - Set relevantIndices, - MetaData previousMetaData, - MetaData newMetaData) { - List actions = new ArrayList<>(); - for (Index index : relevantIndices) { - IndexMetaData newIndexMetaData = newMetaData.getIndexSafe(index); - IndexMetaData previousIndexMetaData = previousMetaData == null ? null : previousMetaData.index(index); - if (previouslyWrittenIndices.containsKey(index) == false || previousIndexMetaData == null) { - actions.add(new WriteNewIndexMetaData(newIndexMetaData)); - } else if (previousIndexMetaData.getVersion() != newIndexMetaData.getVersion()) { - actions.add(new WriteChangedIndexMetaData(previousIndexMetaData, newIndexMetaData)); - } else { - actions.add(new KeepPreviousGeneration(index, previouslyWrittenIndices.get(index))); + private static class GatewayClusterApplier implements ClusterStateApplier { + + private final IncrementalClusterStateWriter incrementalClusterStateWriter; + + private GatewayClusterApplier(IncrementalClusterStateWriter incrementalClusterStateWriter) { + this.incrementalClusterStateWriter = incrementalClusterStateWriter; + } + + @Override + public void applyClusterState(ClusterChangedEvent event) { + if (event.state().blocks().disableStatePersistence()) { + incrementalClusterStateWriter.setIncrementalWrite(false); + return; + } + + try { + // Hack: This is to ensure that non-master-eligible Zen2 nodes always store a current term + // that's higher than the last accepted term. + // TODO: can we get rid of this hack? 
+ if (event.state().term() > incrementalClusterStateWriter.getPreviousManifest().getCurrentTerm()) { + incrementalClusterStateWriter.setCurrentTerm(event.state().term()); + } + + incrementalClusterStateWriter.updateClusterState(event.state(), event.previousState()); + incrementalClusterStateWriter.setIncrementalWrite(true); + } catch (WriteStateException e) { + logger.warn("Exception occurred when storing new meta data", e); } } - return actions; + } - private static Set getRelevantIndicesOnDataOnlyNode(ClusterState state, ClusterState previousState, Set - previouslyWrittenIndices) { - RoutingNode newRoutingNode = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); - if (newRoutingNode == null) { - throw new IllegalStateException("cluster state does not contain this node - cannot write index meta state"); + private static class GatewayPersistedState implements PersistedState { + + private final IncrementalClusterStateWriter incrementalClusterStateWriter; + + GatewayPersistedState(IncrementalClusterStateWriter incrementalClusterStateWriter) { + this.incrementalClusterStateWriter = incrementalClusterStateWriter; } - Set indices = new HashSet<>(); - for (ShardRouting routing : newRoutingNode) { - indices.add(routing.index()); + + @Override + public long getCurrentTerm() { + return incrementalClusterStateWriter.getPreviousManifest().getCurrentTerm(); } - // we have to check the meta data also: closed indices will not appear in the routing table, but we must still write the state if - // we have it written on disk previously - for (IndexMetaData indexMetaData : state.metaData()) { - boolean isOrWasClosed = indexMetaData.getState().equals(IndexMetaData.State.CLOSE); - // if the index is open we might still have to write the state if it just transitioned from closed to open - // so we have to check for that as well. - IndexMetaData previousMetaData = previousState.metaData().index(indexMetaData.getIndex()); - if (previousMetaData != null) { - isOrWasClosed = isOrWasClosed || previousMetaData.getState().equals(IndexMetaData.State.CLOSE); - } - if (previouslyWrittenIndices.contains(indexMetaData.getIndex()) && isOrWasClosed) { - indices.add(indexMetaData.getIndex()); + + @Override + public ClusterState getLastAcceptedState() { + final ClusterState previousClusterState = incrementalClusterStateWriter.getPreviousClusterState(); + assert previousClusterState.nodes().getLocalNode() != null : "Cluster state is not fully built yet"; + return previousClusterState; + } + + @Override + public void setCurrentTerm(long currentTerm) { + try { + incrementalClusterStateWriter.setCurrentTerm(currentTerm); + } catch (WriteStateException e) { + logger.error(new ParameterizedMessage("Failed to set current term to {}", currentTerm), e); + e.rethrowAsErrorOrUncheckedException(); } } - return indices; - } - - private static Set getRelevantIndicesForMasterEligibleNode(ClusterState state) { - Set relevantIndices = new HashSet<>(); - // we have to iterate over the metadata to make sure we also capture closed indices - for (IndexMetaData indexMetaData : state.metaData()) { - relevantIndices.add(indexMetaData.getIndex()); - } - return relevantIndices; - } - - /** - * Action to perform with index metadata. - */ - public interface IndexMetaDataAction { - /** - * @return index for index metadata. - */ - Index getIndex(); - - /** - * Executes this action using provided {@link AtomicClusterStateWriter}. - * - * @return new index metadata state generation, to be used in manifest file. 
- * @throws WriteStateException if exception occurs. - */ - long execute(AtomicClusterStateWriter writer) throws WriteStateException; - } - - public static class KeepPreviousGeneration implements IndexMetaDataAction { - private final Index index; - private final long generation; - - KeepPreviousGeneration(Index index, long generation) { - this.index = index; - this.generation = generation; - } @Override - public Index getIndex() { - return index; + public void setLastAcceptedState(ClusterState clusterState) { + try { + final ClusterState previousClusterState = incrementalClusterStateWriter.getPreviousClusterState(); + incrementalClusterStateWriter.setIncrementalWrite(previousClusterState.term() == clusterState.term()); + incrementalClusterStateWriter.updateClusterState(clusterState, previousClusterState); + } catch (WriteStateException e) { + logger.error(new ParameterizedMessage("Failed to set last accepted state with version {}", clusterState.version()), e); + e.rethrowAsErrorOrUncheckedException(); + } } - @Override - public long execute(AtomicClusterStateWriter writer) { - return generation; - } } - public static class WriteNewIndexMetaData implements IndexMetaDataAction { - private final IndexMetaData indexMetaData; - - WriteNewIndexMetaData(IndexMetaData indexMetaData) { - this.indexMetaData = indexMetaData; - } - - @Override - public Index getIndex() { - return indexMetaData.getIndex(); - } - - @Override - public long execute(AtomicClusterStateWriter writer) throws WriteStateException { - return writer.writeIndex("freshly created", indexMetaData); - } - } - - public static class WriteChangedIndexMetaData implements IndexMetaDataAction { - private final IndexMetaData newIndexMetaData; - private final IndexMetaData oldIndexMetaData; - - WriteChangedIndexMetaData(IndexMetaData oldIndexMetaData, IndexMetaData newIndexMetaData) { - this.oldIndexMetaData = oldIndexMetaData; - this.newIndexMetaData = newIndexMetaData; - } - - @Override - public Index getIndex() { - return newIndexMetaData.getIndex(); - } - - @Override - public long execute(AtomicClusterStateWriter writer) throws WriteStateException { - return writer.writeIndex( - "version changed from [" + oldIndexMetaData.getVersion() + "] to [" + newIndexMetaData.getVersion() + "]", - newIndexMetaData); - } - } } diff --git a/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java b/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java new file mode 100644 index 000000000000..d015bcc5b6c2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/gateway/IncrementalClusterStateWriter.java @@ -0,0 +1,441 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.gateway; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.Manifest; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.LongSupplier; + +/** + * Tracks the metadata written to disk, allowing updated metadata to be written incrementally (i.e. only writing out the changed metadata). + */ +public class IncrementalClusterStateWriter { + + private static final Logger logger = LogManager.getLogger(IncrementalClusterStateWriter.class); + + public static final Setting SLOW_WRITE_LOGGING_THRESHOLD = Setting.timeSetting("gateway.slow_write_logging_threshold", + TimeValue.timeValueSeconds(10), TimeValue.ZERO, Setting.Property.NodeScope, Setting.Property.Dynamic); + + private final MetaStateService metaStateService; + + // On master-eligible nodes we call updateClusterState under the Coordinator's mutex; on master-ineligible data nodes we call + // updateClusterState on the (unique) cluster applier thread; on other nodes we never call updateClusterState. In all cases there's + // no need to synchronize access to these fields. + private Manifest previousManifest; + private ClusterState previousClusterState; + private final LongSupplier relativeTimeMillisSupplier; + private boolean incrementalWrite; + + private volatile TimeValue slowWriteLoggingThreshold; + + IncrementalClusterStateWriter(Settings settings, ClusterSettings clusterSettings, MetaStateService metaStateService, Manifest manifest, + ClusterState clusterState, LongSupplier relativeTimeMillisSupplier) { + this.metaStateService = metaStateService; + this.previousManifest = manifest; + this.previousClusterState = clusterState; + this.relativeTimeMillisSupplier = relativeTimeMillisSupplier; + this.incrementalWrite = false; + this.slowWriteLoggingThreshold = SLOW_WRITE_LOGGING_THRESHOLD.get(settings); + clusterSettings.addSettingsUpdateConsumer(SLOW_WRITE_LOGGING_THRESHOLD, this::setSlowWriteLoggingThreshold); + } + + private void setSlowWriteLoggingThreshold(TimeValue slowWriteLoggingThreshold) { + this.slowWriteLoggingThreshold = slowWriteLoggingThreshold; + } + + void setCurrentTerm(long currentTerm) throws WriteStateException { + Manifest manifest = new Manifest(currentTerm, previousManifest.getClusterStateVersion(), previousManifest.getGlobalGeneration(), + new HashMap<>(previousManifest.getIndexGenerations())); + metaStateService.writeManifestAndCleanup("current term changed", manifest); + previousManifest = manifest; + } + + Manifest getPreviousManifest() { + return previousManifest; + } + + ClusterState getPreviousClusterState() { + return previousClusterState; + } + + void setIncrementalWrite(boolean incrementalWrite) { + this.incrementalWrite = incrementalWrite; + } + + /** + * Updates manifest and meta data on disk. 
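+     * Writes are incremental where possible: the global metadata is rewritten only when it has changed, and each
+     * index's metadata is rewritten only when its version has changed.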
+ * + * @param newState new {@link ClusterState} + * @param previousState previous {@link ClusterState} + * + * @throws WriteStateException if exception occurs. See also {@link WriteStateException#isDirty()}. + */ + void updateClusterState(ClusterState newState, ClusterState previousState) throws WriteStateException { + MetaData newMetaData = newState.metaData(); + + final long startTimeMillis = relativeTimeMillisSupplier.getAsLong(); + + final AtomicClusterStateWriter writer = new AtomicClusterStateWriter(metaStateService, previousManifest); + long globalStateGeneration = writeGlobalState(writer, newMetaData); + Map indexGenerations = writeIndicesMetadata(writer, newState, previousState); + Manifest manifest = new Manifest(previousManifest.getCurrentTerm(), newState.version(), globalStateGeneration, indexGenerations); + writeManifest(writer, manifest); + previousManifest = manifest; + previousClusterState = newState; + + final long durationMillis = relativeTimeMillisSupplier.getAsLong() - startTimeMillis; + final TimeValue finalSlowWriteLoggingThreshold = this.slowWriteLoggingThreshold; + if (durationMillis >= finalSlowWriteLoggingThreshold.getMillis()) { + logger.warn("writing cluster state took [{}ms] which is above the warn threshold of [{}]; " + + "wrote metadata for [{}] indices and skipped [{}] unchanged indices", + durationMillis, finalSlowWriteLoggingThreshold, writer.getIndicesWritten(), writer.getIndicesSkipped()); + } else { + logger.debug("writing cluster state took [{}ms]; wrote metadata for [{}] indices and skipped [{}] unchanged indices", + durationMillis, writer.getIndicesWritten(), writer.getIndicesSkipped()); + } + } + + private void writeManifest(AtomicClusterStateWriter writer, Manifest manifest) throws WriteStateException { + if (manifest.equals(previousManifest) == false) { + writer.writeManifestAndCleanup("changed", manifest); + } + } + + private Map writeIndicesMetadata(AtomicClusterStateWriter writer, ClusterState newState, ClusterState previousState) + throws WriteStateException { + Map previouslyWrittenIndices = previousManifest.getIndexGenerations(); + Set relevantIndices = getRelevantIndices(newState, previousState, previouslyWrittenIndices.keySet()); + + Map newIndices = new HashMap<>(); + + MetaData previousMetaData = incrementalWrite ? previousState.metaData() : null; + Iterable actions = resolveIndexMetaDataActions(previouslyWrittenIndices, relevantIndices, previousMetaData, + newState.metaData()); + + for (IndexMetaDataAction action : actions) { + long generation = action.execute(writer); + newIndices.put(action.getIndex(), generation); + } + + return newIndices; + } + + private long writeGlobalState(AtomicClusterStateWriter writer, MetaData newMetaData) throws WriteStateException { + if (incrementalWrite == false || MetaData.isGlobalStateEquals(previousClusterState.metaData(), newMetaData) == false) { + return writer.writeGlobalState("changed", newMetaData); + } + return previousManifest.getGlobalGeneration(); + } + + + /** + * Returns list of {@link IndexMetaDataAction} for each relevant index. + * For each relevant index there are 3 options: + *
+     * <ol>
+     * <li>
+     * {@link KeepPreviousGeneration} - index metadata is already stored to disk and index metadata version is not changed, no
+     * action is required.
+     * </li>
+     * <li>
+     * {@link WriteNewIndexMetaData} - there is no index metadata on disk and index metadata for this index should be written.
+     * </li>
+     * <li>
+     * {@link WriteChangedIndexMetaData} - index metadata is already on disk, but index metadata version has changed. Updated
+     * index metadata should be written to disk.
+     * </li>
+     * </ol>
+ * + * @param previouslyWrittenIndices A list of indices for which the state was already written before + * @param relevantIndices The list of indices for which state should potentially be written + * @param previousMetaData The last meta data we know of + * @param newMetaData The new metadata + * @return list of {@link IndexMetaDataAction} for each relevant index. + */ + // exposed for tests + static List resolveIndexMetaDataActions(Map previouslyWrittenIndices, + Set relevantIndices, + MetaData previousMetaData, + MetaData newMetaData) { + List actions = new ArrayList<>(); + for (Index index : relevantIndices) { + IndexMetaData newIndexMetaData = newMetaData.getIndexSafe(index); + IndexMetaData previousIndexMetaData = previousMetaData == null ? null : previousMetaData.index(index); + + if (previouslyWrittenIndices.containsKey(index) == false || previousIndexMetaData == null) { + actions.add(new WriteNewIndexMetaData(newIndexMetaData)); + } else if (previousIndexMetaData.getVersion() != newIndexMetaData.getVersion()) { + actions.add(new WriteChangedIndexMetaData(previousIndexMetaData, newIndexMetaData)); + } else { + actions.add(new KeepPreviousGeneration(index, previouslyWrittenIndices.get(index))); + } + } + return actions; + } + + private static Set getRelevantIndicesOnDataOnlyNode(ClusterState state, ClusterState previousState, Set + previouslyWrittenIndices) { + RoutingNode newRoutingNode = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); + if (newRoutingNode == null) { + throw new IllegalStateException("cluster state does not contain this node - cannot write index meta state"); + } + Set indices = new HashSet<>(); + for (ShardRouting routing : newRoutingNode) { + indices.add(routing.index()); + } + // we have to check the meta data also: closed indices will not appear in the routing table, but we must still write the state if + // we have it written on disk previously + for (IndexMetaData indexMetaData : state.metaData()) { + boolean isOrWasClosed = indexMetaData.getState().equals(IndexMetaData.State.CLOSE); + // if the index is open we might still have to write the state if it just transitioned from closed to open + // so we have to check for that as well. 
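+            // an index qualifies below only if its state was written before and it is, or was, closed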
+ IndexMetaData previousMetaData = previousState.metaData().index(indexMetaData.getIndex()); + if (previousMetaData != null) { + isOrWasClosed = isOrWasClosed || previousMetaData.getState().equals(IndexMetaData.State.CLOSE); + } + if (previouslyWrittenIndices.contains(indexMetaData.getIndex()) && isOrWasClosed) { + indices.add(indexMetaData.getIndex()); + } + } + return indices; + } + + private static Set getRelevantIndicesForMasterEligibleNode(ClusterState state) { + Set relevantIndices = new HashSet<>(); + // we have to iterate over the metadata to make sure we also capture closed indices + for (IndexMetaData indexMetaData : state.metaData()) { + relevantIndices.add(indexMetaData.getIndex()); + } + return relevantIndices; + } + + // exposed for tests + static Set getRelevantIndices(ClusterState state, ClusterState previousState, Set previouslyWrittenIndices) { + Set relevantIndices; + if (isDataOnlyNode(state)) { + relevantIndices = getRelevantIndicesOnDataOnlyNode(state, previousState, previouslyWrittenIndices); + } else if (state.nodes().getLocalNode().isMasterNode()) { + relevantIndices = getRelevantIndicesForMasterEligibleNode(state); + } else { + relevantIndices = Collections.emptySet(); + } + return relevantIndices; + } + + private static boolean isDataOnlyNode(ClusterState state) { + return state.nodes().getLocalNode().isMasterNode() == false && state.nodes().getLocalNode().isDataNode(); + } + + /** + * Action to perform with index metadata. + */ + interface IndexMetaDataAction { + /** + * @return index for index metadata. + */ + Index getIndex(); + + /** + * Executes this action using provided {@link AtomicClusterStateWriter}. + * + * @return new index metadata state generation, to be used in manifest file. + * @throws WriteStateException if exception occurs. + */ + long execute(AtomicClusterStateWriter writer) throws WriteStateException; + } + + /** + * This class is used to write changed global {@link MetaData}, {@link IndexMetaData} and {@link Manifest} to disk. + * This class delegates write* calls to corresponding write calls in {@link MetaStateService} and + * additionally it keeps track of cleanup actions to be performed if transaction succeeds or fails. 
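+     * A writer instance is single-use: once the manifest has been written, or any write has failed, it is
+     * finished and must not be used again.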
+ */ + static class AtomicClusterStateWriter { + private static final String FINISHED_MSG = "AtomicClusterStateWriter is finished"; + private final List commitCleanupActions; + private final List rollbackCleanupActions; + private final Manifest previousManifest; + private final MetaStateService metaStateService; + private boolean finished; + + private int indicesWritten; + private int indicesSkipped; + + AtomicClusterStateWriter(MetaStateService metaStateService, Manifest previousManifest) { + this.metaStateService = metaStateService; + assert previousManifest != null; + this.previousManifest = previousManifest; + this.commitCleanupActions = new ArrayList<>(); + this.rollbackCleanupActions = new ArrayList<>(); + this.finished = false; + } + + long writeGlobalState(String reason, MetaData metaData) throws WriteStateException { + assert finished == false : FINISHED_MSG; + try { + rollbackCleanupActions.add(() -> metaStateService.cleanupGlobalState(previousManifest.getGlobalGeneration())); + long generation = metaStateService.writeGlobalState(reason, metaData); + commitCleanupActions.add(() -> metaStateService.cleanupGlobalState(generation)); + return generation; + } catch (WriteStateException e) { + rollback(); + throw e; + } + } + + long writeIndex(String reason, IndexMetaData metaData) throws WriteStateException { + assert finished == false : FINISHED_MSG; + try { + Index index = metaData.getIndex(); + Long previousGeneration = previousManifest.getIndexGenerations().get(index); + if (previousGeneration != null) { + // we prefer not to clean-up index metadata in case of rollback, + // if it's not referenced by previous manifest file + // not to break dangling indices functionality + rollbackCleanupActions.add(() -> metaStateService.cleanupIndex(index, previousGeneration)); + } + long generation = metaStateService.writeIndex(reason, metaData); + commitCleanupActions.add(() -> metaStateService.cleanupIndex(index, generation)); + return generation; + } catch (WriteStateException e) { + rollback(); + throw e; + } + } + + void writeManifestAndCleanup(String reason, Manifest manifest) throws WriteStateException { + assert finished == false : FINISHED_MSG; + try { + metaStateService.writeManifestAndCleanup(reason, manifest); + commitCleanupActions.forEach(Runnable::run); + finished = true; + } catch (WriteStateException e) { + // If the Manifest write results in a dirty WriteStateException it's not safe to roll back, removing the new metadata files, + // because if the Manifest was actually written to disk and its deletion fails it will reference these new metadata files. + // On master-eligible nodes a dirty WriteStateException here is fatal to the node since we no longer really have any idea + // what the state on disk is and the only sensible response is to start again from scratch. 
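+                // Note that a dirty exception also means the commit-time cleanup actions above are skipped, so
+                // files from both the old and the new generations may be left on disk.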
+ if (e.isDirty() == false) { + rollback(); + } + throw e; + } + } + + void rollback() { + rollbackCleanupActions.forEach(Runnable::run); + finished = true; + } + + void incrementIndicesWritten() { + indicesWritten++; + } + + void incrementIndicesSkipped() { + indicesSkipped++; + } + + int getIndicesWritten() { + return indicesWritten; + } + + int getIndicesSkipped() { + return indicesSkipped; + } + } + + static class KeepPreviousGeneration implements IndexMetaDataAction { + private final Index index; + private final long generation; + + KeepPreviousGeneration(Index index, long generation) { + this.index = index; + this.generation = generation; + } + + @Override + public Index getIndex() { + return index; + } + + @Override + public long execute(AtomicClusterStateWriter writer) { + writer.incrementIndicesSkipped(); + return generation; + } + } + + static class WriteNewIndexMetaData implements IndexMetaDataAction { + private final IndexMetaData indexMetaData; + + WriteNewIndexMetaData(IndexMetaData indexMetaData) { + this.indexMetaData = indexMetaData; + } + + @Override + public Index getIndex() { + return indexMetaData.getIndex(); + } + + @Override + public long execute(AtomicClusterStateWriter writer) throws WriteStateException { + writer.incrementIndicesWritten(); + return writer.writeIndex("freshly created", indexMetaData); + } + } + + static class WriteChangedIndexMetaData implements IndexMetaDataAction { + private final IndexMetaData newIndexMetaData; + private final IndexMetaData oldIndexMetaData; + + WriteChangedIndexMetaData(IndexMetaData oldIndexMetaData, IndexMetaData newIndexMetaData) { + this.oldIndexMetaData = oldIndexMetaData; + this.newIndexMetaData = newIndexMetaData; + } + + @Override + public Index getIndex() { + return newIndexMetaData.getIndex(); + } + + @Override + public long execute(AtomicClusterStateWriter writer) throws WriteStateException { + writer.incrementIndicesWritten(); + return writer.writeIndex( + "version changed from [" + oldIndexMetaData.getVersion() + "] to [" + newIndexMetaData.getVersion() + "]", + newIndexMetaData); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java index 5325821d050c..f8e14ef9af52 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenFilter.java @@ -19,11 +19,9 @@ package org.elasticsearch.index.analysis; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.elasticsearch.Version; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; @@ -35,9 +33,6 @@ import java.util.function.Function; */ public final class PreConfiguredTokenFilter extends PreConfiguredAnalysisComponent { - private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(LogManager.getLogger(PreConfiguredTokenFilter.class)); - /** * Create a pre-configured token filter that may not vary at all. 
*/ diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 8aadc64806c5..5bfec3c2e9eb 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1869,13 +1869,6 @@ public abstract class Engine implements Closeable { */ public abstract void skipTranslogRecovery(); - /** - * Returns true iff this engine is currently recovering from translog. - */ - public boolean isRecovering() { - return false; - } - /** * Tries to prune buffered deletes from the version map. */ diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 8fe9547cd733..81ee1132c59a 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -88,7 +88,6 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ElasticsearchMergePolicy; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.FsDirectoryFactory; -import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogCorruptedException; @@ -98,7 +97,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -203,7 +201,7 @@ public class InternalEngine extends Engine { mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings()); throttle = new IndexThrottle(); try { - trimUnsafeCommits(engineConfig); + store.trimUnsafeCommits(config().getTranslogConfig().getTranslogPath()); translog = openTranslog(engineConfig, translogDeletionPolicy, engineConfig.getGlobalCheckpointSupplier(), seqNo -> { final LocalCheckpointTracker tracker = getLocalCheckpointTracker(); @@ -2459,7 +2457,7 @@ public class InternalEngine extends Engine { } } - private void ensureCanFlush() { + final void ensureCanFlush() { // translog recover happens after the engine is fully constructed // if we are in this stage we have to prevent flushes from this // engine otherwise we might loose documents if the flush succeeds @@ -2651,11 +2649,6 @@ public class InternalEngine extends Engine { } } - @Override - public boolean isRecovering() { - return pendingTranslogRecovery.get(); - } - /** * Gets the commit data from {@link IndexWriter} as a map. 
*/ @@ -2806,15 +2799,6 @@ public class InternalEngine extends Engine { return true; } - private static void trimUnsafeCommits(EngineConfig engineConfig) throws IOException { - final Store store = engineConfig.getStore(); - final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); - final Path translogPath = engineConfig.getTranslogConfig().getTranslogPath(); - final long globalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); - final long minRetainedTranslogGen = Translog.readMinTranslogGeneration(translogPath, translogUUID); - store.trimUnsafeCommits(globalCheckpoint, minRetainedTranslogGen, engineConfig.getIndexSettings().getIndexVersionCreated()); - } - /** * Restores the live version map and local checkpoint of this engine using documents (including soft-deleted) * after the local checkpoint in the safe commit. This step ensures the live version map and checkpoint tracker diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java index b271084a0d29..a61059fb97d2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.mapper; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,8 +33,6 @@ import java.util.TreeMap; public class DynamicTemplate implements ToXContentObject { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(DynamicTemplate.class)); - public enum MatchType { SIMPLE { @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index 84211e69cf42..ae16aab41686 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -24,6 +24,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; +import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -47,8 +48,7 @@ import java.util.Objects; */ public class FieldNamesFieldMapper extends MetadataFieldMapper { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(FieldNamesFieldMapper.class)); + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(FieldNamesFieldMapper.class)); public static final String NAME = "_field_names"; @@ -111,8 +111,13 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { String indexName = parserContext.mapperService().index().getName(); - deprecationLogger.deprecatedAndMaybeLog("field_names_enabled_parameter", ENABLED_DEPRECATION_MESSAGE, indexName); - builder.enabled(XContentMapValues.nodeBooleanValue(fieldNode, name + ".enabled")); 
+                    if (parserContext.indexVersionCreated().onOrAfter(Version.V_8_0_0)) {
+                        throw new MapperParsingException("The `enabled` setting for the `_field_names` field has been deprecated and "
+                            + "removed but is still used in index [" + indexName + "]. Please remove it from your mappings and templates.");
+                    } else {
+                        deprecationLogger.deprecatedAndMaybeLog("field_names_enabled_parameter", ENABLED_DEPRECATION_MESSAGE, indexName);
+                        builder.enabled(XContentMapValues.nodeBooleanValue(fieldNode, name + ".enabled"));
+                    }
                     iterator.remove();
                 }
             }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java
index 4e6906401351..c3693f4ded9f 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java
@@ -28,7 +28,6 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -38,7 +37,6 @@ import org.elasticsearch.index.query.QueryShardContext;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
-import java.util.regex.Pattern;
 
 public class IndexFieldMapper extends MetadataFieldMapper {
 
@@ -175,20 +173,6 @@ public class IndexFieldMapper extends MetadataFieldMapper {
             }
         }
 
-        @Override
-        public Query regexpQuery(String value, int flags, int maxDeterminizedStates,
-                                 MultiTermQuery.RewriteMethod method, QueryShardContext context) {
-            String indexName = context.getFullyQualifiedIndex().getName();
-            Pattern pattern = Regex.compile(value, Regex.flagsToString(flags));
-
-            if (pattern.matcher(indexName).matches()) {
-                return Queries.newMatchAllQuery();
-            } else {
-                return Queries.newMatchNoDocsQuery("The index [" + context.getFullyQualifiedIndex().getName()
-                    + "] doesn't match the provided pattern [" + value + "].");
-            }
-        }
-
         @Override
         public Query wildcardQuery(String value,
                                    @Nullable MultiTermQuery.RewriteMethod method,
diff --git a/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java
index e09d71938add..def29b91a7f8 100644
--- a/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java
+++ b/server/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.index.query;
 
-import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.Version;
@@ -28,7 +27,6 @@ import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -50,12 +48,8 @@ import static org.elasticsearch.common.xcontent.ObjectParser.fromList;
  * A query that will return only documents matching specific ids (and a type).
*/ public class IdsQueryBuilder extends AbstractQueryBuilder { - public static final String NAME = "ids"; - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(IdsQueryBuilder.class)); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [ids] queries."; - private static final ParseField TYPE_FIELD = new ParseField("type"); + public static final String NAME = "ids"; private static final ParseField VALUES_FIELD = new ParseField("values"); private final Set ids = new HashSet<>(); diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java index 92ba3c5d2b84..a82bea8f0922 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java @@ -60,6 +60,10 @@ public abstract class InnerHitContextBuilder { doValidate(queryShardContext); } + public InnerHitBuilder innerHitBuilder() { + return innerHitBuilder; + } + protected abstract void doValidate(QueryShardContext queryShardContext); public abstract void build(SearchContext parentSearchContext, InnerHitsContext innerHitsContext) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 1decda0d6338..7760d3fdb793 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -445,4 +445,12 @@ public class QueryShardContext extends QueryRewriteContext { public BigArrays bigArrays() { return bigArrays; } + + public SimilarityService getSimilarityService() { + return similarityService; + } + + public BitsetFilterCache getBitsetFilterCache() { + return bitsetFilterCache; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index 0d11f6012daf..15e895ecf134 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.util.BytesRef; @@ -34,7 +33,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -64,11 +62,6 @@ import java.util.stream.IntStream; public class TermsQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "terms"; - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(TermsQueryBuilder.class)); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated " + - "in [terms] lookup queries."; - private final String fieldName; private final List values; private final 
TermsLookup termsLookup; @@ -217,10 +210,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { return this.termsLookup; } - public boolean isTypeless() { - return termsLookup == null || termsLookup.type() == null; - } - private static final Set> INTEGER_TYPES = new HashSet<>( Arrays.asList(Byte.class, Short.class, Integer.class, Long.class)); private static final Set> STRING_TYPES = new HashSet<>( @@ -406,10 +395,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { .boost(boost) .queryName(queryName); - if (builder.isTypeless() == false) { - deprecationLogger.deprecatedAndMaybeLog("terms_lookup_with_types", TYPES_DEPRECATION_MESSAGE); - } - return builder; } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 04bfb1c36adf..83bd0392f255 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -169,11 +169,6 @@ public class ClientScrollableHitSource extends ScrollableHitSource { return delegate.getIndex(); } - @Override - public String getType() { - return delegate.getType(); - } - @Override public String getId() { return delegate.getId(); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java index 07d22ddb663f..c1a545c1dacd 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java @@ -228,10 +228,6 @@ public abstract class ScrollableHitSource { * The index in which the hit is stored. */ String getIndex(); - /** - * The type that the hit has. - */ - String getType(); /** * The document id of the hit. */ @@ -272,7 +268,6 @@ public abstract class ScrollableHitSource { */ public static class BasicHit implements Hit { private final String index; - private final String type; private final String id; private final long version; @@ -282,9 +277,8 @@ public abstract class ScrollableHitSource { private long seqNo; private long primaryTerm; - public BasicHit(String index, String type, String id, long version) { + public BasicHit(String index, String id, long version) { this.index = index; - this.type = type; this.id = id; this.version = version; } @@ -294,11 +288,6 @@ public abstract class ScrollableHitSource { return index; } - @Override - public String getType() { - return type; - } - @Override public String getId() { return id; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 13f9092a3ae2..fa452270fab6 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1054,12 +1054,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl public Engine.SyncedFlushResult syncFlush(String syncId, Engine.CommitId expectedCommitId) { verifyNotClosed(); logger.trace("trying to sync flush. sync id [{}]. 
expected commit id [{}]]", syncId, expectedCommitId); - Engine engine = getEngine(); - if (engine.isRecovering()) { - throw new IllegalIndexShardStateException(shardId(), state, "syncFlush is only allowed if the engine is not recovery" + - " from translog"); - } - return engine.syncFlush(syncId, expectedCommitId); + return getEngine().syncFlush(syncId, expectedCommitId); } /** @@ -1078,15 +1073,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * since we use Engine#writeIndexingBuffer for this now. */ verifyNotClosed(); - final Engine engine = getEngine(); - if (engine.isRecovering()) { - throw new IllegalIndexShardStateException( - shardId(), - state, - "flush is only allowed if the engine is not recovery from translog"); - } final long time = System.nanoTime(); - final Engine.CommitId commitId = engine.flush(force, waitIfOngoing); + final Engine.CommitId commitId = getEngine().flush(force, waitIfOngoing); flushMetric.inc(System.nanoTime() - time); return commitId; } diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java index 766a3187ef8f..129ce08ebd82 100644 --- a/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/index/snapshots/IndexShardSnapshotStatus.java @@ -58,6 +58,7 @@ public class IndexShardSnapshotStatus { } private final AtomicReference stage; + private final AtomicReference generation; private long startTime; private long totalTime; private int incrementalFileCount; @@ -71,8 +72,10 @@ public class IndexShardSnapshotStatus { private IndexShardSnapshotStatus(final Stage stage, final long startTime, final long totalTime, final int incrementalFileCount, final int totalFileCount, final int processedFileCount, - final long incrementalSize, final long totalSize, final long processedSize, final String failure) { + final long incrementalSize, final long totalSize, final long processedSize, final String failure, + final String generation) { this.stage = new AtomicReference<>(Objects.requireNonNull(stage)); + this.generation = new AtomicReference<>(generation); this.startTime = startTime; this.totalTime = totalTime; this.incrementalFileCount = incrementalFileCount; @@ -109,9 +112,11 @@ public class IndexShardSnapshotStatus { return asCopy(); } - public synchronized void moveToDone(final long endTime) { + public synchronized void moveToDone(final long endTime, final String newGeneration) { + assert newGeneration != null; if (stage.compareAndSet(Stage.FINALIZE, Stage.DONE)) { this.totalTime = Math.max(0L, endTime - startTime); + this.generation.set(newGeneration); } else { throw new IllegalStateException("Unable to move the shard snapshot status to [DONE]: " + "expecting [FINALIZE] but got [" + stage.get() + "]"); @@ -131,6 +136,10 @@ public class IndexShardSnapshotStatus { } } + public String generation() { + return generation.get(); + } + public boolean isAborted() { return stage.get() == Stage.ABORTED; } @@ -156,8 +165,8 @@ public class IndexShardSnapshotStatus { indexVersion, failure); } - public static IndexShardSnapshotStatus newInitializing() { - return new IndexShardSnapshotStatus(Stage.INIT, 0L, 0L, 0, 0, 0, 0, 0, 0, null); + public static IndexShardSnapshotStatus newInitializing(String generation) { + return new IndexShardSnapshotStatus(Stage.INIT, 0L, 0L, 0, 0, 0, 0, 0, 0, null, generation); } public static IndexShardSnapshotStatus 
newFailed(final String failure) { @@ -165,15 +174,15 @@ public class IndexShardSnapshotStatus { if (failure == null) { throw new IllegalArgumentException("A failure description is required for a failed IndexShardSnapshotStatus"); } - return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, 0, failure); + return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, 0, failure, null); } public static IndexShardSnapshotStatus newDone(final long startTime, final long totalTime, final int incrementalFileCount, final int fileCount, - final long incrementalSize, final long size) { + final long incrementalSize, final long size, String generation) { // The snapshot is done which means the number of processed files is the same as total return new IndexShardSnapshotStatus(Stage.DONE, startTime, totalTime, incrementalFileCount, fileCount, incrementalFileCount, - incrementalSize, size, incrementalSize, null); + incrementalSize, size, incrementalSize, null, generation); } /** diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index eae9ca3a8bda..604bf44fab17 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -1484,22 +1484,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref } } - - /** - * Checks that the Lucene index contains a history uuid marker. If not, a new one is generated and committed. - */ - public void ensureIndexHasHistoryUUID() throws IOException { - metadataLock.writeLock().lock(); - try (IndexWriter writer = newAppendingIndexWriter(directory, null)) { - final Map userData = getUserData(writer); - if (userData.containsKey(Engine.HISTORY_UUID_KEY) == false) { - updateCommitData(writer, Collections.singletonMap(Engine.HISTORY_UUID_KEY, UUIDs.randomBase64UUID())); - } - } finally { - metadataLock.writeLock().unlock(); - } - } - /** * Keeping existing unsafe commits when opening an engine can be problematic because these commits are not safe * at the recovering time but they can suddenly become safe in the future. @@ -1517,31 +1501,17 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref * commit on the replica will cause exception as the new last commit c3 will have recovery_translog_gen=1. The recovery * translog generation of a commit is calculated based on the current local checkpoint. The local checkpoint of c3 is 1 * while the local checkpoint of c2 is 2. - *
-     * 3. Commit without translog can be used in recovery. An old index, which was created before multiple-commits is introduced
-     * (v6.2), may not have a safe commit. If that index has a snapshotted commit without translog and an unsafe commit,
-     * the policy can consider the snapshotted commit as a safe commit for recovery even the commit does not have translog.
      */
-    public void trimUnsafeCommits(final long lastSyncedGlobalCheckpoint, final long minRetainedTranslogGen,
-                                  final org.elasticsearch.Version indexVersionCreated) throws IOException {
+    public void trimUnsafeCommits(final Path translogPath) throws IOException {
         metadataLock.writeLock().lock();
         try {
             final List<IndexCommit> existingCommits = DirectoryReader.listCommits(directory);
-            if (existingCommits.isEmpty()) {
-                throw new IllegalArgumentException("No index found to trim");
-            }
-            final IndexCommit lastIndexCommitCommit = existingCommits.get(existingCommits.size() - 1);
-            final String translogUUID = lastIndexCommitCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY);
-            final IndexCommit startingIndexCommit;
-            // TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint.
-            startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint);
-
-            if (translogUUID.equals(startingIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY)) == false) {
-                throw new IllegalStateException("starting commit translog uuid ["
-                    + startingIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY) + "] is not equal to last commit's translog uuid ["
-                    + translogUUID + "]");
-            }
-            if (startingIndexCommit.equals(lastIndexCommitCommit) == false) {
+            assert existingCommits.isEmpty() == false;
+            final IndexCommit lastIndexCommit = existingCommits.get(existingCommits.size() - 1);
+            final String translogUUID = lastIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY);
+            final long lastSyncedGlobalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID);
+            final IndexCommit startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint);
+            if (startingIndexCommit.equals(lastIndexCommit) == false) {
                 try (IndexWriter writer = newAppendingIndexWriter(directory, startingIndexCommit)) {
                     // this achieves two things:
                     // - by committing a new commit based on the starting commit, it make sure the starting commit will be opened
diff --git a/server/src/main/java/org/elasticsearch/indices/TermsLookup.java b/server/src/main/java/org/elasticsearch/indices/TermsLookup.java
index d309df70b0a1..db3eef31db27 100644
--- a/server/src/main/java/org/elasticsearch/indices/TermsLookup.java
+++ b/server/src/main/java/org/elasticsearch/indices/TermsLookup.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.indices;
 
-import org.elasticsearch.common.Nullable;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.TermsQueryBuilder;
 
 import java.io.IOException;
@@ -37,21 +38,11 @@ import java.util.Objects;
  */
 public class TermsLookup implements Writeable, ToXContentFragment {
     private final
String index;
-    private @Nullable String type;
     private final String id;
     private final String path;
     private String routing;
 
-    public TermsLookup(String index, String id, String path) {
-        this(index, null, id, path);
-    }
-
-    /**
-     * @deprecated Types are in the process of being removed, use {@link TermsLookup(String, String, String)} instead.
-     */
-    @Deprecated
-    public TermsLookup(String index, String type, String id, String path) {
+    public TermsLookup(String index, String id, String path) {
         if (id == null) {
             throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the id.");
         }
@@ -62,7 +53,6 @@ public class TermsLookup implements Writeable, ToXContentFragment {
             throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the index.");
         }
         this.index = index;
-        this.type = type;
         this.id = id;
         this.path = path;
     }
@@ -71,7 +61,9 @@
      * Read from a stream.
      */
     public TermsLookup(StreamInput in) throws IOException {
-        type = in.readOptionalString();
+        if (in.getVersion().before(Version.V_8_0_0)) {
+            in.readOptionalString();
+        }
         id = in.readString();
         path = in.readString();
         index = in.readString();
     }
@@ -80,7 +72,9 @@
     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        out.writeOptionalString(type);
+        if (out.getVersion().before(Version.V_8_0_0)) {
+            out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME);
+        }
         out.writeString(id);
         out.writeString(path);
         out.writeString(index);
     }
@@ -91,14 +85,6 @@ public String index() {
         return index;
     }
 
-    /**
-     * @deprecated Types are in the process of being removed.
-     */
-    @Deprecated
-    public String type() {
-        return type;
-    }
-
     public String id() {
         return id;
     }
@@ -118,7 +104,6 @@
     public static TermsLookup parseTermsLookup(XContentParser parser) throws IOException {
         String index = null;
-        String type = null;
         String id = null;
         String path = null;
         String routing = null;
@@ -132,9 +117,6 @@
                     case "index":
                         index = parser.text();
                         break;
-                    case "type":
-                        type = parser.text();
-                        break;
                     case "id":
                         id = parser.text();
                         break;
@@ -153,28 +135,17 @@
                     + token + "] after [" + currentFieldName + "]");
             }
         }
-        if (type == null) {
-            return new TermsLookup(index, id, path).routing(routing);
-        } else {
-            return new TermsLookup(index, type, id, path).routing(routing);
-        }
+        return new TermsLookup(index, id, path).routing(routing);
     }
 
     @Override
     public String toString() {
-        if (type == null) {
-            return index + "/" + id + "/" + path;
-        } else {
-            return index + "/" + type + "/" + id + "/" + path;
-        }
+        return index + "/" + id + "/" + path;
     }
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.field("index", index);
-        if (type != null) {
-            builder.field("type", type);
-        }
         builder.field("id", id);
         builder.field("path", path);
         if (routing != null) {
@@ -185,7 +156,7 @@
 
     @Override
     public int hashCode() {
-        return Objects.hash(index, type, id, path, routing);
+        return Objects.hash(index, id, path, routing);
     }
 
     @Override
@@ -198,7 +169,6 @@
         }
         TermsLookup other =
(TermsLookup) obj; return Objects.equals(index, other.index) && - Objects.equals(type, other.type) && Objects.equals(id, other.id) && Objects.equals(path, other.path) && Objects.equals(routing, other.routing); diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 5797843161c5..536d0b15b5f2 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -339,6 +339,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { // child circuit breakers is categorized as transient or permanent. CircuitBreaker.Durability durability = memoryUsed.transientChildUsage >= memoryUsed.permanentChildUsage ? CircuitBreaker.Durability.TRANSIENT : CircuitBreaker.Durability.PERMANENT; + logger.debug("{}", message); throw new CircuitBreakingException(message.toString(), memoryUsed.totalUsage, parentLimit, durability); } } diff --git a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 157f6d7d05ca..d189dfd33395 100644 --- a/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -586,8 +586,17 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple final IndexMetaData newIndexMetaData = state.metaData().index(index); assert newIndexMetaData != null : "index " + index + " should have been removed by deleteIndices"; if (ClusterChangedEvent.indexMetaDataChanged(currentIndexMetaData, newIndexMetaData)) { - indexService.updateMetaData(currentIndexMetaData, newIndexMetaData); + String reason = null; try { + reason = "metadata update failed"; + try { + indexService.updateMetaData(currentIndexMetaData, newIndexMetaData); + } catch (Exception e) { + assert false : e; + throw e; + } + + reason = "mapping update failed"; if (indexService.updateMapping(currentIndexMetaData, newIndexMetaData) && sendRefreshMapping) { nodeMappingRefreshAction.nodeMappingRefresh(state.nodes().getMasterNode(), new NodeMappingRefreshAction.NodeMappingRefreshRequest(newIndexMetaData.getIndex().getName(), @@ -595,14 +604,14 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple ); } } catch (Exception e) { - indicesService.removeIndex(indexService.index(), FAILURE, "removing index (mapping update failed)"); + indicesService.removeIndex(indexService.index(), FAILURE, "removing index (" + reason + ")"); // fail shards that would be created or updated by createOrUpdateShards RoutingNode localRoutingNode = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); if (localRoutingNode != null) { for (final ShardRouting shardRouting : localRoutingNode) { if (shardRouting.index().equals(index) && failedShardsCache.containsKey(shardRouting.shardId()) == false) { - sendFailShard(shardRouting, "failed to update mapping for index", e, state); + sendFailShard(shardRouting, "failed to update index (" + reason + ")", e, state); } } } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index ed3999adf7e6..c302b143b932 100644 --- 
a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -480,7 +480,7 @@ public class Node implements Closeable { ).collect(Collectors.toSet()); final TransportService transportService = newTransportService(settings, transport, threadPool, networkModule.getTransportInterceptor(), localNodeFactory, settingsModule.getClusterSettings(), taskHeaders); - final GatewayMetaState gatewayMetaState = new GatewayMetaState(settings, metaStateService); + final GatewayMetaState gatewayMetaState = new GatewayMetaState(); final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); final SearchTransportService searchTransportService = new SearchTransportService(transportService, SearchExecutionStatsCollector.makeWrapper(responseCollectorService)); @@ -694,7 +694,7 @@ public class Node implements Closeable { // Load (and maybe upgrade) the metadata stored on disk final GatewayMetaState gatewayMetaState = injector.getInstance(GatewayMetaState.class); - gatewayMetaState.start(transportService, clusterService, + gatewayMetaState.start(settings(), transportService, clusterService, injector.getInstance(MetaStateService.class), injector.getInstance(MetaDataIndexUpgradeService.class), injector.getInstance(MetaDataUpgrader.class)); // we load the global state here (the persistent part of the cluster state stored on disk) to // pass it to the bootstrap checks to allow plugins to enforce certain preconditions based on the recovered state. diff --git a/server/src/main/java/org/elasticsearch/plugins/Platforms.java b/server/src/main/java/org/elasticsearch/plugins/Platforms.java index 91af58ebec46..8a374701b6e2 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Platforms.java +++ b/server/src/main/java/org/elasticsearch/plugins/Platforms.java @@ -38,6 +38,15 @@ public class Platforms { * The path to the native controller for a plugin with native components. */ public static Path nativeControllerPath(Path plugin) { + if (Constants.MAC_OS_X) { + return plugin + .resolve("platform") + .resolve(PLATFORM_NAME) + .resolve(PROGRAM_NAME + ".app") + .resolve("Contents") + .resolve("MacOS") + .resolve(PROGRAM_NAME); + } return plugin .resolve("platform") .resolve(PLATFORM_NAME) @@ -46,7 +55,26 @@ public class Platforms { } /** - * Return the platform name based on the OS name and + * The fallback path to the native controller for a plugin with native + * components to be used if no program is found using the standard path. + * This is a temporary measure to allow developers not working on this + * functionality to continue to work with C++ bundles from before or + * after the change. This code should never be in a supported release. 
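To make the lookup order concrete: callers are expected to try the standard per-platform location first and use the fallback only when nothing exists at the standard path. A minimal sketch of that order, with stand-ins for `Constants.MAC_OS_X`, `PLATFORM_NAME` and `PROGRAM_NAME` (the helper below is hypothetical, not part of `Platforms`):

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Optional;

    // Hypothetical helper illustrating the standard-then-fallback lookup.
    final class NativeControllerLookup {
        static final String PLATFORM = "darwin-x86_64"; // stand-in for Platforms.PLATFORM_NAME
        static final String PROGRAM = "controller";     // stand-in for Platforms.PROGRAM_NAME

        static Optional<Path> find(Path plugin, boolean macOs) {
            // Standard location: on macOS this is now inside the .app bundle.
            Path standard = macOs
                ? plugin.resolve("platform").resolve(PLATFORM)
                    .resolve(PROGRAM + ".app").resolve("Contents").resolve("MacOS").resolve(PROGRAM)
                : plugin.resolve("platform").resolve(PLATFORM).resolve("bin").resolve(PROGRAM);
            if (Files.isRegularFile(standard)) {
                return Optional.of(standard);
            }
            // Temporary fallback: the old bin layout, only defined for macOS.
            Path fallback = macOs
                ? plugin.resolve("platform").resolve(PLATFORM).resolve("bin").resolve(PROGRAM)
                : null;
            return fallback != null && Files.isRegularFile(fallback) ? Optional.of(fallback) : Optional.empty();
        }
    }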
+ * TODO: remove this method before release + */ + public static Path fallbackNativeControllerPath(Path plugin) { + if (Constants.MAC_OS_X) { + return plugin + .resolve("platform") + .resolve(PLATFORM_NAME) + .resolve("bin") + .resolve(PROGRAM_NAME); + } + return null; + } + + /** + * Return the platform name based on the OS name and architecture, for example: * - darwin-x86_64 * - linux-x86-64 * - windows-x86_64 diff --git a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java index 6d9cba05748e..07acd55cea03 100644 --- a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java @@ -123,7 +123,7 @@ public class FilterRepository implements Repository { @Override public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, - IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { + IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { in.snapshotShard(store, mapperService, snapshotId, indexId, snapshotIndexCommit, snapshotStatus, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/repositories/Repository.java b/server/src/main/java/org/elasticsearch/repositories/Repository.java index a02975e120b3..7eb2196884bb 100644 --- a/server/src/main/java/org/elasticsearch/repositories/Repository.java +++ b/server/src/main/java/org/elasticsearch/repositories/Repository.java @@ -209,7 +209,7 @@ public interface Repository extends LifecycleComponent { * @param listener listener invoked on completion */ void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, - IndexShardSnapshotStatus snapshotStatus, ActionListener listener); + IndexShardSnapshotStatus snapshotStatus, ActionListener listener); /** * Restores snapshot of the shard. diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index f8bdca75c122..027d2955fad9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -477,7 +477,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp *

 * <ul>
 *     <li>Deleting unreferenced root level blobs {@link #cleanupStaleRootFiles}</li>
 * </ul>
 * @param repositoryStateId Current repository state id
- * @param listener          Lister to complete when done
+ * @param listener          Listener to complete when done
 */
public void cleanup(long repositoryStateId, ActionListener<RepositoryCleanupResult> listener) {
    try {
@@ -942,10 +942,10 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
     @Override
     public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId,
-                              IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener<Void> listener) {
+                              IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener<String> listener) {
         final ShardId shardId = store.shardId();
         final long startTime = threadPool.absoluteTimeInMillis();
-        final StepListener<Void> snapshotDoneListener = new StepListener<>();
+        final StepListener<String> snapshotDoneListener = new StepListener<>();
         snapshotDoneListener.whenComplete(listener::onResponse, e -> {
             snapshotStatus.moveToFailed(threadPool.absoluteTimeInMillis(), ExceptionsHelper.stackTrace(e));
             listener.onFailure(e instanceof IndexShardSnapshotFailedException ? (IndexShardSnapshotFailedException) e
@@ -1084,8 +1084,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                 logger.warn(() -> new ParameterizedMessage("[{}][{}] failed to delete old index-N blobs during finalization",
                     snapshotId, shardId), e);
             }
-            snapshotStatus.moveToDone(threadPool.absoluteTimeInMillis());
-            snapshotDoneListener.onResponse(null);
+            snapshotStatus.moveToDone(threadPool.absoluteTimeInMillis(), indexGeneration);
+            snapshotDoneListener.onResponse(indexGeneration);
         }, snapshotDoneListener::onFailure);
         if (indexIncrementalFileCount == 0) {
             allFilesUploadedListener.onResponse(Collections.emptyList());
@@ -1153,7 +1153,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         BlobStoreIndexShardSnapshot snapshot = loadShardSnapshot(shardContainer(indexId, shardId), snapshotId);
         return IndexShardSnapshotStatus.newDone(snapshot.startTime(), snapshot.time(),
             snapshot.incrementalFileCount(), snapshot.totalFileCount(),
-            snapshot.incrementalSize(), snapshot.totalSize());
+            snapshot.incrementalSize(), snapshot.totalSize(), null); // Not adding a real generation here as it doesn't matter to callers
     }

     @Override
@@ -1205,7 +1205,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
     /**
      * Delete shard snapshot
      */
-    private void deleteShardSnapshot(RepositoryData repositoryData, IndexId indexId, ShardId snapshotShardId, SnapshotId snapshotId) {
+    private void deleteShardSnapshot(RepositoryData repositoryData, IndexId indexId, ShardId snapshotShardId, SnapshotId snapshotId)
+        throws IOException {
         final BlobContainer shardContainer = shardContainer(indexId, snapshotShardId);
         final Set<String> blobs;
         try {
@@ -1285,19 +1286,16 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
      * @param blobs list of blobs in repository
      * @return tuple of BlobStoreIndexShardSnapshots and the last snapshot index generation
      */
-    private Tuple<BlobStoreIndexShardSnapshots, Long> buildBlobStoreIndexShardSnapshots(Set<String> blobs, BlobContainer shardContainer) {
+    private Tuple<BlobStoreIndexShardSnapshots, Long> buildBlobStoreIndexShardSnapshots(Set<String> blobs, BlobContainer shardContainer)
+        throws IOException {
         long latest = latestGeneration(blobs);
         if (latest >= 0) {
-            try {
-                final BlobStoreIndexShardSnapshots shardSnapshots =
-                    indexShardSnapshotsFormat.read(shardContainer, Long.toString(latest));
-                return new Tuple<>(shardSnapshots, latest);
-            } catch (IOException e) {
- final String file = SNAPSHOT_INDEX_PREFIX + latest; - logger.warn(() -> new ParameterizedMessage("failed to read index file [{}]", file), e); - } - } else if (blobs.isEmpty() == false) { - logger.warn("Could not find a readable index-N file in a non-empty shard snapshot directory [{}]", shardContainer.path()); + final BlobStoreIndexShardSnapshots shardSnapshots = indexShardSnapshotsFormat.read(shardContainer, Long.toString(latest)); + return new Tuple<>(shardSnapshots, latest); + } else if (blobs.stream().anyMatch(b -> b.startsWith(SNAPSHOT_PREFIX) || b.startsWith(INDEX_FILE_PREFIX) + || b.startsWith(DATA_BLOB_PREFIX))) { + throw new IllegalStateException( + "Could not find a readable index-N file in a non-empty shard snapshot directory [" + shardContainer.path() + "]"); } return new Tuple<>(BlobStoreIndexShardSnapshots.EMPTY, latest); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 27d8c2f8a8c0..82a5769f3a70 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -44,18 +44,13 @@ import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; public class RestGetFieldMappingAction extends BaseRestHandler { - - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(RestGetFieldMappingAction.class)); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " + - "field mapping requests is deprecated. The parameter will be removed in the next major version."; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetFieldMappingAction.class)); + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " + + "field mapping requests is deprecated. 
The parameter will be removed in the next major version."; public RestGetFieldMappingAction(RestController controller) { controller.registerHandler(GET, "/_mapping/field/{fields}", this); - controller.registerHandler(GET, "/_mapping/{type}/field/{fields}", this); controller.registerHandler(GET, "/{index}/_mapping/field/{fields}", this); - controller.registerHandler(GET, "/{index}/{type}/_mapping/field/{fields}", this); - controller.registerHandler(GET, "/{index}/_mapping/{type}/field/{fields}", this); } @Override @@ -66,20 +61,18 @@ public class RestGetFieldMappingAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); final String[] fields = Strings.splitStringByCommaToArray(request.param("fields")); - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (includeTypeName == false && types.length > 0) { - throw new IllegalArgumentException("Types cannot be specified unless include_type_name" + - " is set to true."); - } if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { + boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); + if (includeTypeName) { + throw new IllegalArgumentException(INCLUDE_TYPE_NAME_PARAMETER + " no longer supports the value [true]."); + } deprecationLogger.deprecatedAndMaybeLog("get_field_mapping_with_types", TYPES_DEPRECATION_MESSAGE); } GetFieldMappingsRequest getMappingsRequest = new GetFieldMappingsRequest(); - getMappingsRequest.indices(indices).types(types).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false)); + getMappingsRequest.indices(indices).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false)); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); getMappingsRequest.local(request.paramAsBoolean("local", getMappingsRequest.local())); return channel -> @@ -88,7 +81,7 @@ public class RestGetFieldMappingAction extends BaseRestHandler { public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBuilder builder) throws Exception { Map>> mappingsByIndex = response.mappings(); - boolean isPossibleSingleFieldRequest = indices.length == 1 && types.length == 1 && fields.length == 1; + boolean isPossibleSingleFieldRequest = indices.length == 1 && fields.length == 1; if (isPossibleSingleFieldRequest && isFieldMappingMissingField(mappingsByIndex)) { return new BytesRestResponse(OK, builder.startObject().endObject()); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index cc7eba1d2caa..9156f3361eee 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -19,21 +19,14 @@ package org.elasticsearch.rest.action.admin.indices; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -43,34 +36,19 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Set; -import java.util.SortedSet; -import java.util.stream.Collectors; import static org.elasticsearch.rest.RestRequest.Method.GET; -import static org.elasticsearch.rest.RestRequest.Method.HEAD; public class RestGetMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetMappingAction.class); - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get" + - " mapping requests is deprecated. The parameter will be removed in the next major version."; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetMappingAction.class)); + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get" + + " mapping requests is deprecated. 
The parameter will be removed in the next major version."; public RestGetMappingAction(final RestController controller) { controller.registerHandler(GET, "/_mapping", this); controller.registerHandler(GET, "/_mappings", this); - controller.registerHandler(GET, "/{index}/{type}/_mapping", this); controller.registerHandler(GET, "/{index}/_mappings", this); controller.registerHandler(GET, "/{index}/_mapping", this); - controller.registerHandler(GET, "/{index}/_mappings/{type}", this); - controller.registerHandler(GET, "/{index}/_mapping/{type}", this); - controller.registerHandler(HEAD, "/{index}/_mapping/{type}", this); - controller.registerHandler(GET, "/_mapping/{type}", this); } @Override @@ -81,74 +59,24 @@ public class RestGetMappingAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (request.method().equals(HEAD)) { - deprecationLogger.deprecated("Type exists requests are deprecated, as types have been deprecated."); - } else if (includeTypeName == false && types.length > 0) { - throw new IllegalArgumentException("Types cannot be provided in get mapping requests, unless" + - " include_type_name is set to true."); - } if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { + request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); deprecationLogger.deprecatedAndMaybeLog("get_mapping_with_types", TYPES_DEPRECATION_MESSAGE); } final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); - getMappingsRequest.indices(indices).types(types); + getMappingsRequest.indices(indices); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); getMappingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getMappingsRequest.masterNodeTimeout())); getMappingsRequest.local(request.paramAsBoolean("local", getMappingsRequest.local())); - return channel -> client.admin().indices().getMappings(getMappingsRequest, new RestBuilderListener(channel) { + return channel -> client.admin().indices().getMappings(getMappingsRequest, new RestBuilderListener<>(channel) { @Override public RestResponse buildResponse(final GetMappingsResponse response, final XContentBuilder builder) throws Exception { - final ImmutableOpenMap> mappingsByIndex = response.getMappings(); - if (mappingsByIndex.isEmpty() && types.length != 0) { - builder.close(); - return new BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); - } - - final Set typeNames = new HashSet<>(); - for (final ObjectCursor> cursor : mappingsByIndex.values()) { - for (final ObjectCursor inner : cursor.value.keys()) { - typeNames.add(inner.value); - } - } - - final SortedSet difference = Sets.sortedDifference(Arrays.stream(types).collect(Collectors.toSet()), typeNames); - - // now remove requested aliases that contain wildcards that are simple matches - final List matches = new ArrayList<>(); - outer: - for (final String pattern : difference) { - if (pattern.contains("*")) { - for (final String typeName : typeNames) { - if (Regex.simpleMatch(pattern, typeName)) { - matches.add(pattern); - continue outer; - } - } - } - } - difference.removeAll(matches); - - 
final RestStatus status; builder.startObject(); - { - if (difference.isEmpty()) { - status = RestStatus.OK; - } else { - status = RestStatus.NOT_FOUND; - final String message = String.format(Locale.ROOT, "type" + (difference.size() == 1 ? "" : "s") + - " [%s] missing", Strings.collectionToCommaDelimitedString(difference)); - builder.field("error", message); - builder.field("status", status.getStatus()); - } - response.toXContent(builder, request); - } + response.toXContent(builder, request); builder.endObject(); - - return new BytesRestResponse(status, builder); + return new BytesRestResponse(RestStatus.OK, builder); } }); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index 149758c60b4b..a3feaafff15d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -36,37 +36,21 @@ import java.io.IOException; import java.util.Map; import static org.elasticsearch.client.Requests.putMappingRequest; -import static org.elasticsearch.index.mapper.MapperService.isMappingSourceTyped; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestPutMappingAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(RestPutMappingAction.class)); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in put " + - "mapping requests is deprecated. The parameter will be removed in the next major version."; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestPutMappingAction.class)); + public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in put " + + "mapping requests is deprecated. 
The parameter will be removed in the next major version."; public RestPutMappingAction(RestController controller) { controller.registerHandler(PUT, "/{index}/_mapping/", this); - controller.registerHandler(PUT, "/{index}/{type}/_mapping", this); - controller.registerHandler(PUT, "/{index}/_mapping/{type}", this); - controller.registerHandler(PUT, "/_mapping/{type}", this); - controller.registerHandler(POST, "/{index}/_mapping/", this); - controller.registerHandler(POST, "/{index}/{type}/_mapping", this); - controller.registerHandler(POST, "/{index}/_mapping/{type}", this); - controller.registerHandler(POST, "/_mapping/{type}", this); //register the same paths, but with plural form _mappings controller.registerHandler(PUT, "/{index}/_mappings/", this); - controller.registerHandler(PUT, "/{index}/{type}/_mappings", this); - controller.registerHandler(PUT, "/{index}/_mappings/{type}", this); - controller.registerHandler(PUT, "/_mappings/{type}", this); - controller.registerHandler(POST, "/{index}/_mappings/", this); - controller.registerHandler(POST, "/{index}/{type}/_mappings", this); - controller.registerHandler(POST, "/{index}/_mappings/{type}", this); - controller.registerHandler(POST, "/_mappings/{type}", this); } @Override @@ -76,23 +60,19 @@ public class RestPutMappingAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, - DEFAULT_INCLUDE_TYPE_NAME_POLICY); + PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); + + final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { deprecationLogger.deprecatedAndMaybeLog("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE); } - PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); - - final String type = request.param("type"); - putMappingRequest.type(includeTypeName ? 
type : MapperService.SINGLE_MAPPING_NAME); + putMappingRequest.type(MapperService.SINGLE_MAPPING_NAME); Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - if (includeTypeName == false && - (type != null || isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap))) { - throw new IllegalArgumentException("Types cannot be provided in put mapping requests, unless " + - "the include_type_name parameter is set to true."); + if (includeTypeName == false && MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap)) { + throw new IllegalArgumentException("Types cannot be provided in put mapping requests"); } putMappingRequest.source(sourceAsMap); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index 39e9b3fa852d..d176dad12e19 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.admin.indices; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.admin.indices.validate.query.QueryExplanation; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; @@ -27,7 +26,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -44,18 +42,12 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestStatus.OK; public class RestValidateQueryAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(RestValidateQueryAction.class)); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + - " Specifying types in validate query requests is deprecated."; public RestValidateQueryAction(RestController controller) { controller.registerHandler(GET, "/_validate/query", this); controller.registerHandler(POST, "/_validate/query", this); controller.registerHandler(GET, "/{index}/_validate/query", this); controller.registerHandler(POST, "/{index}/_validate/query", this); - controller.registerHandler(GET, "/{index}/{type}/_validate/query", this); - controller.registerHandler(POST, "/{index}/{type}/_validate/query", this); } @Override @@ -68,12 +60,6 @@ public class RestValidateQueryAction extends BaseRestHandler { ValidateQueryRequest validateQueryRequest = new ValidateQueryRequest(Strings.splitStringByCommaToArray(request.param("index"))); validateQueryRequest.indicesOptions(IndicesOptions.fromRequest(request, validateQueryRequest.indicesOptions())); validateQueryRequest.explain(request.paramAsBoolean("explain", false)); - - if (request.hasParam("type")) { - deprecationLogger.deprecatedAndMaybeLog("validate_query_with_types", TYPES_DEPRECATION_MESSAGE); - validateQueryRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); - } 
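The REST handlers above all drop their typed routes and converge on one pattern for `include_type_name`: the parameter may still be sent for backwards compatibility, sending it always logs a deprecation warning, and the stricter handlers (such as get field mapping) reject a `true` value outright. A self-contained sketch of the strict variant of that gate; the plain-`Map` signature and `System.err` are stand-ins for the real `RestRequest` and `DeprecationLogger` APIs:

    import java.util.Map;

    // Illustrative distillation of the include_type_name gate, not the actual
    // BaseRestHandler API. Shown in its strict form, as in RestGetFieldMappingAction.
    final class IncludeTypeNameGate {
        static void check(Map<String, String> params) {
            String raw = params.get("include_type_name");
            if (raw == null) {
                return; // parameter absent: plain typeless request, nothing to do
            }
            if (Boolean.parseBoolean(raw)) {
                // The value [true] is rejected now that typed APIs are gone.
                throw new IllegalArgumentException("include_type_name no longer supports the value [true].");
            }
            // Real handlers call deprecationLogger.deprecatedAndMaybeLog(...) here.
            System.err.println("[types removal] Using include_type_name is deprecated.");
        }
    }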
- validateQueryRequest.rewrite(request.paramAsBoolean("rewrite", false)); validateQueryRequest.allShards(request.paramAsBoolean("all_shards", false)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 24050479be25..5a50b0ff04ce 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -42,7 +42,6 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -227,11 +226,6 @@ public class RestIndicesAction extends AbstractCatAction { @Override public void onFailure(final Exception e) { - // Temporary logging to help debug https://github.com/elastic/elasticsearch/issues/45652 - // TODO: remove this when we understand why _cat/indices sometimes returns a 404 - if (e instanceof IndexNotFoundException) { - logger.debug("_cat/indices returning index_not_found_exception", e); - } listener.onFailure(e); } }, size); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java index 14fb06039609..c9cb5a5fb0f5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java @@ -19,14 +19,12 @@ package org.elasticsearch.rest.action.document; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.rest.BaseRestHandler; @@ -50,15 +48,9 @@ import static org.elasticsearch.rest.RestStatus.OK; */ public class RestGetSourceAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetSourceAction.class)); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source" - + "requests is deprecated."; - public RestGetSourceAction(final RestController controller) { controller.registerHandler(GET, "/{index}/_source/{id}", this); controller.registerHandler(HEAD, "/{index}/_source/{id}", this); - controller.registerHandler(GET, "/{index}/{type}/{id}/_source", this); - controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index bcaea4e65f0b..5613540edad8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ 
b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -19,11 +19,9 @@ package org.elasticsearch.rest.action.search; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -41,18 +39,10 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; * Rest action for computing a score explanation for specific documents. */ public class RestExplainAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(RestExplainAction.class)); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + - "Specifying a type in explain requests is deprecated."; public RestExplainAction(RestController controller) { controller.registerHandler(GET, "/{index}/_explain/{id}", this); controller.registerHandler(POST, "/{index}/_explain/{id}", this); - - // Deprecated typed endpoints. - controller.registerHandler(GET, "/{index}/{type}/{id}/_explain", this); - controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this); } @Override @@ -62,15 +52,7 @@ public class RestExplainAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - ExplainRequest explainRequest; - if (request.hasParam("type")) { - deprecationLogger.deprecatedAndMaybeLog("explain_with_types", TYPES_DEPRECATION_MESSAGE); - explainRequest = new ExplainRequest(request.param("index"), - request.param("type"), - request.param("id")); - } else { - explainRequest = new ExplainRequest(request.param("index"), request.param("id")); - } + ExplainRequest explainRequest = new ExplainRequest(request.param("index"), request.param("id")); explainRequest.parent(request.param("parent")); explainRequest.routing(request.param("routing")); diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 9a272bbff07f..39d022d169d2 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -260,7 +260,7 @@ final class DefaultSearchContext extends SearchContext { try { this.query = searcher.rewrite(query); } catch (IOException e) { - throw new QueryPhaseExecutionException(this, "Failed to rewrite main query", e); + throw new QueryPhaseExecutionException(shardTarget, "Failed to rewrite main query", e); } } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index bbb19b83cd9f..23fb3d8d628b 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -21,6 +21,7 @@ package org.elasticsearch.search; import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -85,7 +86,6 @@ public final class SearchHit implements 
Writeable, ToXContentObject, Iterable fields) { - this(docId, id, type, null, fields); + public SearchHit(int docId, String id, Map fields) { + this(docId, id, null, fields); } - public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity, Map fields) { + public SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity, Map fields) { this.docId = nestedTopDocId; if (id != null) { this.id = new Text(id); } else { this.id = null; } - this.type = type; this.nestedIdentity = nestedIdentity; this.fields = fields; } @@ -143,7 +142,9 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterablenull is returned. */ @@ -535,7 +529,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable, Void> parser) { declareMetaDataFields(parser); - parser.declareString((map, value) -> map.put(Fields._TYPE, new Text(value)), new ParseField(Fields._TYPE)); parser.declareString((map, value) -> map.put(Fields._INDEX, value), new ParseField(Fields._INDEX)); parser.declareString((map, value) -> map.put(Fields._ID, value), new ParseField(Fields._ID)); parser.declareString((map, value) -> map.put(Fields._NODE, value), new ParseField(Fields._NODE)); @@ -716,11 +705,10 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable values) { String id = get(Fields._ID, values, null); - Text type = get(Fields._TYPE, values, null); NestedIdentity nestedIdentity = get(NestedIdentity._NESTED, values, null); Map fields = get(Fields.FIELDS, values, Collections.emptyMap()); - SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, fields); + SearchHit searchHit = new SearchHit(-1, id, nestedIdentity, fields); String index = get(Fields._INDEX, values, null); String clusterAlias = null; if (index != null) { @@ -786,8 +774,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable, Void> parser) { for (String metadatafield : MapperService.getAllMetaFields()) { - if (metadatafield.equals(Fields._ID) == false && metadatafield.equals(Fields._INDEX) == false - && metadatafield.equals(Fields._TYPE) == false) { + if (metadatafield.equals(Fields._ID) == false && metadatafield.equals(Fields._INDEX) == false) { if (metadatafield.equals(IgnoredFieldMapper.NAME)) { parser.declareObjectArray((map, list) -> { @SuppressWarnings("unchecked") @@ -895,7 +882,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable 0) { if (context.scrollContext() != null) { - throw new SearchContextException(context, "`search_after` cannot be used in a scroll context."); + throw new SearchException(shardTarget, "`search_after` cannot be used in a scroll context."); } if (context.from() > 0) { - throw new SearchContextException(context, "`from` parameter must be set to 0 when `search_after` is used."); + throw new SearchException(shardTarget, "`from` parameter must be set to 0 when `search_after` is used."); } FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter()); context.searchAfter(fieldDoc); @@ -880,7 +881,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv if (source.slice() != null) { if (context.scrollContext() == null) { - throw new SearchContextException(context, "`slice` cannot be used outside of a scroll context"); + throw new SearchException(shardTarget, "`slice` cannot be used outside of a scroll context"); } context.sliceBuilder(source.slice()); } @@ -888,10 +889,10 @@ public class SearchService extends 
AbstractLifecycleComponent implements IndexEv if (source.storedFields() != null) { if (source.storedFields().fetchFields() == false) { if (context.version()) { - throw new SearchContextException(context, "`stored_fields` cannot be disabled if version is requested"); + throw new SearchException(shardTarget, "`stored_fields` cannot be disabled if version is requested"); } if (context.sourceRequested()) { - throw new SearchContextException(context, "`stored_fields` cannot be disabled if _source is requested"); + throw new SearchException(shardTarget, "`stored_fields` cannot be disabled if _source is requested"); } } context.storedFieldsContext(source.storedFields()); @@ -899,13 +900,13 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv if (source.collapse() != null) { if (context.scrollContext() != null) { - throw new SearchContextException(context, "cannot use `collapse` in a scroll context"); + throw new SearchException(shardTarget, "cannot use `collapse` in a scroll context"); } if (context.searchAfter() != null) { - throw new SearchContextException(context, "cannot use `collapse` in conjunction with `search_after`"); + throw new SearchException(shardTarget, "cannot use `collapse` in conjunction with `search_after`"); } if (context.rescore() != null && context.rescore().isEmpty() == false) { - throw new SearchContextException(context, "cannot use `collapse` in conjunction with `rescore`"); + throw new SearchException(shardTarget, "cannot use `collapse` in conjunction with `rescore`"); } final CollapseContext collapseContext = source.collapse().build(queryShardContext); context.collapse(collapseContext); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index 851997ab4685..c150f99cfe19 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -116,7 +116,7 @@ public class AggregationPhase implements SearchPhase { globalsCollector.preCollection(); context.searcher().search(query, collector); } catch (Exception e) { - throw new QueryPhaseExecutionException(context, "Failed to execute global aggregators", e); + throw new QueryPhaseExecutionException(context.shardTarget(), "Failed to execute global aggregators", e); } finally { context.clearReleasables(SearchContext.Lifetime.COLLECTION); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index ec276b7e01b8..ea8970052179 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext.Lifetime; @@ -76,11 +77,11 @@ public abstract class AggregatorBase extends Aggregator { assert factories != null : "sub-factories provided to BucketAggregator must not be null, 
use AggragatorFactories.EMPTY instead"; this.subAggregators = factories.createSubAggregators(context, this); context.addReleasable(this, Lifetime.PHASE); + final SearchShardTarget shardTarget = context.shardTarget(); // Register a safeguard to highlight any invalid construction logic (call to this constructor without subsequent preCollection call) collectableSubAggregators = new BucketCollector() { void badState(){ - throw new QueryPhaseExecutionException(AggregatorBase.this.context, - "preCollection not called on new Aggregator before use", null); + throw new QueryPhaseExecutionException(shardTarget, "preCollection not called on new Aggregator before use", null); } @Override public LeafBucketCollector getLeafCollector(LeafReaderContext reader) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java index 2c4d73fc7528..fa5ecf296540 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java @@ -134,7 +134,7 @@ class ScriptedMetricAggregatorFactory extends AggregatorFactory { || original instanceof Boolean) { clone = original; } else { - throw new SearchParseException(context, + throw new SearchParseException(context.shardTarget(), "Can only clone primitives, String, ArrayList, and HashMap. Found: " + original.getClass().getCanonicalName(), null); } return clone; diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index 3cbc47173c6b..35df8e322adc 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -95,7 +95,7 @@ public class DfsPhase implements SearchPhase { .fieldStatistics(fieldStatistics) .maxDoc(context.searcher().getIndexReader().maxDoc()); } catch (Exception e) { - throw new DfsPhaseExecutionException(context, "Exception during dfs phase", e); + throw new DfsPhaseExecutionException(context.shardTarget(), "Exception during dfs phase", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhaseExecutionException.java index f493bb4d0523..2a9bd4a8f11c 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhaseExecutionException.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhaseExecutionException.java @@ -20,19 +20,19 @@ package org.elasticsearch.search.dfs; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.SearchContextException; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.SearchException; +import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -public class DfsPhaseExecutionException extends SearchContextException { +public class DfsPhaseExecutionException extends SearchException { - public DfsPhaseExecutionException(SearchContext context, String msg, Throwable t) { - super(context, "Dfs Failed [" + msg + "]", t); + public DfsPhaseExecutionException(SearchShardTarget shardTarget, String msg, Throwable t) { + super(shardTarget, "Dfs Failed [" + msg + "]", t); } - public DfsPhaseExecutionException(SearchContext context, String msg) { - 
super(context, "Dfs Failed [" + msg + "]"); + public DfsPhaseExecutionException(SearchShardTarget shardTarget, String msg) { + super(shardTarget, "Dfs Failed [" + msg + "]"); } public DfsPhaseExecutionException(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 196e74e0bbab..7f03458dc394 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -51,6 +50,7 @@ import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhase; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase; @@ -204,16 +204,14 @@ public class FetchPhase implements SearchPhase { int subDocId, Map> storedToRequestedFields, LeafReaderContext subReaderContext) { - DocumentMapper documentMapper = context.mapperService().documentMapper(); - Text typeText = documentMapper.typeText(); if (fieldsVisitor == null) { - return new SearchHit(docId, null, typeText, null); + return new SearchHit(docId, null, null); } Map searchFields = getSearchFields(context, fieldsVisitor, subDocId, storedToRequestedFields, subReaderContext); - SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields); + SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), searchFields); // Set _source if requested. 
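The exception changes in this region all apply one refactoring: `DfsPhaseExecutionException`, `FetchPhaseExecutionException` and friends now extend `SearchException` and carry only a `SearchShardTarget` instead of the whole `SearchContext`, so a failure can be reported and serialized without keeping the heavyweight, releasable context alive; `AggregatorBase` captures `context.shardTarget()` up front for the same reason. A stripped-down sketch of the shape, with hypothetical names rather than the real Elasticsearch classes:

    // Hypothetical analogue of the SearchException / SearchShardTarget split:
    // the exception retains only a small, immutable target, not live search state.
    final class ShardTarget {
        private final String index;
        private final int shardId;

        ShardTarget(String index, int shardId) {
            this.index = index;
            this.shardId = shardId;
        }

        @Override
        public String toString() {
            return index + "[" + shardId + "]";
        }
    }

    class PhaseFailure extends RuntimeException {
        PhaseFailure(ShardTarget target, String phase, String msg, Throwable cause) {
            super(phase + " Failed [" + msg + "] on " + target, cause);
        }
    }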
SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, subDocId); @@ -228,7 +226,7 @@ public class FetchPhase implements SearchPhase { int subDocId, Map> storedToRequestedFields, LeafReaderContext subReaderContext) { - loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); + loadStoredFields(context.shardTarget(), subReaderContext, fieldsVisitor, subDocId); fieldsVisitor.postProcess(context.mapperService()); if (fieldsVisitor.fields().isEmpty()) { @@ -265,7 +263,7 @@ public class FetchPhase implements SearchPhase { final boolean needSource = context.sourceRequested() || context.highlight() != null; if (needSource || (context instanceof InnerHitsContext.InnerHitSubContext == false)) { FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); - loadStoredFields(context, subReaderContext, rootFieldsVisitor, rootSubDocId); + loadStoredFields(context.shardTarget(), subReaderContext, rootFieldsVisitor, rootSubDocId); rootFieldsVisitor.postProcess(context.mapperService()); uid = rootFieldsVisitor.uid(); source = rootFieldsVisitor.source(); @@ -339,7 +337,7 @@ public class FetchPhase implements SearchPhase { XContentType contentType = tuple.v1(); context.lookup().source().setSourceContentType(contentType); } - return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, searchFields); + return new SearchHit(nestedTopDocId, uid.id(), nestedIdentity, searchFields); } private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId, @@ -405,12 +403,12 @@ public class FetchPhase implements SearchPhase { return nestedIdentity; } - private void loadStoredFields(SearchContext searchContext, LeafReaderContext readerContext, FieldsVisitor fieldVisitor, int docId) { + private void loadStoredFields(SearchShardTarget shardTarget, LeafReaderContext readerContext, FieldsVisitor fieldVisitor, int docId) { fieldVisitor.reset(); try { readerContext.reader().document(docId, fieldVisitor); } catch (IOException e) { - throw new FetchPhaseExecutionException(searchContext, "Failed to fetch doc id [" + docId + "]", e); + throw new FetchPhaseExecutionException(shardTarget, "Failed to fetch doc id [" + docId + "]", e); } } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java index e3fb542134eb..7bf8b878c0d6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseExecutionException.java @@ -20,19 +20,19 @@ package org.elasticsearch.search.fetch; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.SearchContextException; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.SearchException; +import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -public class FetchPhaseExecutionException extends SearchContextException { +public class FetchPhaseExecutionException extends SearchException { - public FetchPhaseExecutionException(SearchContext context, String msg, Throwable t) { - super(context, "Fetch Failed [" + msg + "]", t); + public FetchPhaseExecutionException(SearchShardTarget shardTarget, String msg, Throwable t) { + super(shardTarget, "Fetch Failed [" + msg + "]", t); } - public FetchPhaseExecutionException(SearchContext context, String msg) { - 
super(context, "Fetch Failed [" + msg + "]"); + public FetchPhaseExecutionException(SearchShardTarget shardTarget, String msg) { + super(shardTarget, "Fetch Failed [" + msg + "]"); } public FetchPhaseExecutionException(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java index c177cc8c3aed..3a7d5c96b817 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java @@ -46,8 +46,8 @@ public final class ExplainFetchSubPhase implements FetchSubPhase { // we use the top level doc id, since we work with the top level searcher hitContext.hit().explanation(explanation); } catch (IOException e) { - throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().getType() + "#" - + hitContext.hit().getId() + "]", e); + throw new FetchPhaseExecutionException(context.shardTarget(), + "Failed to explain doc [" + hitContext.hit().getId() + "]", e); } finally { context.clearReleasables(SearchContext.Lifetime.COLLECTION); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java index 2e177e59e062..385a67252f5d 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.query.InnerHitContextBuilder; import org.elasticsearch.search.SearchHit; @@ -71,7 +72,7 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase { docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc; } innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); - innerHits.setUid(new Uid(hit.getType(), hit.getId())); + innerHits.setUid(new Uid(MapperService.SINGLE_MAPPING_NAME, hit.getId())); fetchPhase.execute(innerHits); FetchSearchResult fetchResult = innerHits.fetchResult(); SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index 358b652fbdf9..d9cc0d0be06b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -203,7 +203,7 @@ public class FastVectorHighlighter implements Highlighter { return null; } catch (Exception e) { - throw new FetchPhaseExecutionException(context, + throw new FetchPhaseExecutionException(context.shardTarget(), "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java 
index b4cbd0311675..3dfd4091dde8 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -55,10 +55,10 @@ public class HighlightPhase implements FetchSubPhase { } if (context.highlight().forceSource(field)) { - SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().getType()).sourceMapper(); + SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper().sourceMapper(); if (!sourceFieldMapper.enabled()) { throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight - + " but type [" + hitContext.hit().getType() + "] has disabled _source"); + + " but _source is disabled"); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index 6ad155104a4c..320141b3d456 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -101,7 +101,7 @@ public class PlainHighlighter implements Highlighter { int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments(); ArrayList fragsList = new ArrayList<>(); List textsToHighlight; - Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer(); + Analyzer analyzer = context.mapperService().documentMapper().mappers().indexAnalyzer(); final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset(); try { @@ -139,7 +139,8 @@ public class PlainHighlighter implements Highlighter { // the plain highlighter will parse the source and try to analyze it. 
return null; } else { - throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + throw new FetchPhaseExecutionException(context.shardTarget(), + "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } } if (field.fieldOptions().scoreOrdered()) { @@ -178,7 +179,8 @@ public class PlainHighlighter implements Highlighter { try { end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents); } catch (Exception e) { - throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); + throw new FetchPhaseExecutionException(context.shardTarget(), + "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } if (end > 0) { return new HighlightField(highlighterContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) }); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index b806fb9cd312..394bcf7600dd 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -56,7 +56,7 @@ public class UnifiedHighlighter implements Highlighter { public boolean canHighlight(MappedFieldType fieldType) { return true; } - + @Override public HighlightField highlight(HighlighterContext highlighterContext) { MappedFieldType fieldType = highlighterContext.fieldType; @@ -70,8 +70,7 @@ public class UnifiedHighlighter implements Highlighter { int numberOfFragments; try { - final Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), - hitContext); + final Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(), hitContext); List fieldValues = loadFieldValues(fieldType, field, context, hitContext); if (fieldValues.size() == 0) { return null; @@ -123,7 +122,7 @@ public class UnifiedHighlighter implements Highlighter { } } } catch (IOException e) { - throw new FetchPhaseExecutionException(context, + throw new FetchPhaseExecutionException(context.shardTarget(), "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } @@ -149,11 +148,11 @@ public class UnifiedHighlighter implements Highlighter { return passageFormatter; } - + protected Analyzer getAnalyzer(DocumentMapper docMapper, HitContext hitContext) { return docMapper.mappers().indexAnalyzer(); } - + protected List loadFieldValues(MappedFieldType fieldType, SearchContextHighlight.Field field, SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException { List fieldValues = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java deleted file mode 100644 index 70c2aa6e5ac6..000000000000 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
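All of the highlighter hunks above also drop the type-scoped mapper lookup, documentMapper(hit.getType()), in favour of the typeless accessor. A condensed sketch of the resulting analyzer lookup, built only from calls that appear in the hunks:

    import org.apache.lucene.analysis.Analyzer;
    import org.elasticsearch.search.internal.SearchContext;

    final class AnalyzerLookup {
        // With a single mapping type per index there is exactly one
        // DocumentMapper, so the hit's type is no longer needed here.
        static Analyzer indexAnalyzer(SearchContext context) {
            return context.mapperService().documentMapper().mappers().indexAnalyzer();
        }
    }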
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.internal; - -import org.elasticsearch.Version; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.Rewriteable; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.builder.SearchSourceBuilder; - -import java.io.IOException; -import java.util.Arrays; - -/** - * Shard level search request that gets created and consumed on the local node. - * Used directly by api that need to create a search context within their execution. - * - * Source structure: - * <pre>
    - * {
    - *  from : 0, size : 20, (optional, can be set on the request)
    - *  sort : { "name.first" : {}, "name.last" : { reverse : true } }
    - *  fields : [ "name.first", "name.last" ]
    - *  query : { ... }
    - *  aggs : {
    - *      "agg1" : {
    - *          terms : { ... }
    - *      }
    - *  }
    - * }
    - * </pre>
    - */ -public class ShardSearchLocalRequest implements ShardSearchRequest { - private final String clusterAlias; - private final ShardId shardId; - private final int numberOfShards; - private final SearchType searchType; - private final Scroll scroll; - private final float indexBoost; - private final Boolean requestCache; - private final long nowInMillis; - private final boolean allowPartialSearchResults; - private final String[] indexRoutings; - private final String preference; - //these are the only two mutable fields, as they are subject to rewriting - private AliasFilter aliasFilter; - private SearchSourceBuilder source; - - public ShardSearchLocalRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards, AliasFilter aliasFilter, - float indexBoost, long nowInMillis, @Nullable String clusterAlias, String[] indexRoutings) { - this(shardId, numberOfShards, searchRequest.searchType(), searchRequest.source(), - searchRequest.requestCache(), aliasFilter, indexBoost, searchRequest.allowPartialSearchResults(), indexRoutings, - searchRequest.preference(), searchRequest.scroll(), nowInMillis, clusterAlias); - // If allowPartialSearchResults is unset (ie null), the cluster-level default should have been substituted - // at this stage. Any NPEs in the above are therefore an error in request preparation logic. - assert searchRequest.allowPartialSearchResults() != null; - } - - public ShardSearchLocalRequest(ShardId shardId, long nowInMillis, AliasFilter aliasFilter) { - this(shardId, -1, null, null, null, aliasFilter, 1.0f, false, Strings.EMPTY_ARRAY, null, null, nowInMillis, null); - } - - private ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType searchType, SearchSourceBuilder source, - Boolean requestCache, AliasFilter aliasFilter, float indexBoost, boolean allowPartialSearchResults, - String[] indexRoutings, String preference, Scroll scroll, long nowInMillis, - @Nullable String clusterAlias) { - this.shardId = shardId; - this.numberOfShards = numberOfShards; - this.searchType = searchType; - this.source = source; - this.requestCache = requestCache; - this.aliasFilter = aliasFilter; - this.indexBoost = indexBoost; - this.allowPartialSearchResults = allowPartialSearchResults; - this.indexRoutings = indexRoutings; - this.preference = preference; - this.scroll = scroll; - this.nowInMillis = nowInMillis; - this.clusterAlias = clusterAlias; - } - - ShardSearchLocalRequest(StreamInput in) throws IOException { - shardId = new ShardId(in); - searchType = SearchType.fromId(in.readByte()); - numberOfShards = in.readVInt(); - scroll = in.readOptionalWriteable(Scroll::new); - source = in.readOptionalWriteable(SearchSourceBuilder::new); - if (in.getVersion().before(Version.V_8_0_0)) { - // types no longer relevant so ignore - String[] types = in.readStringArray(); - if (types.length > 0) { - throw new IllegalStateException( - "types are no longer supported in search requests but found [" + Arrays.toString(types) + "]"); - } - } - aliasFilter = new AliasFilter(in); - indexBoost = in.readFloat(); - nowInMillis = in.readVLong(); - requestCache = in.readOptionalBoolean(); - clusterAlias = in.readOptionalString(); - allowPartialSearchResults = in.readBoolean(); - indexRoutings = in.readStringArray(); - preference = in.readOptionalString(); - } - - protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOException { - shardId.writeTo(out); - out.writeByte(searchType.id()); - if (!asKey) { - out.writeVInt(numberOfShards); - } - 
out.writeOptionalWriteable(scroll); - out.writeOptionalWriteable(source); - if (out.getVersion().before(Version.V_8_0_0)) { - // types not supported so send an empty array to previous versions - out.writeStringArray(Strings.EMPTY_ARRAY); - } - aliasFilter.writeTo(out); - out.writeFloat(indexBoost); - if (asKey == false) { - out.writeVLong(nowInMillis); - } - out.writeOptionalBoolean(requestCache); - out.writeOptionalString(clusterAlias); - out.writeBoolean(allowPartialSearchResults); - if (asKey == false) { - out.writeStringArray(indexRoutings); - out.writeOptionalString(preference); - } - } - - @Override - public ShardId shardId() { - return shardId; - } - - @Override - public SearchSourceBuilder source() { - return source; - } - - @Override - public AliasFilter getAliasFilter() { - return aliasFilter; - } - - @Override - public void setAliasFilter(AliasFilter aliasFilter) { - this.aliasFilter = aliasFilter; - } - - @Override - public void source(SearchSourceBuilder source) { - this.source = source; - } - - @Override - public int numberOfShards() { - return numberOfShards; - } - - @Override - public SearchType searchType() { - return searchType; - } - - @Override - public float indexBoost() { - return indexBoost; - } - - @Override - public long nowInMillis() { - return nowInMillis; - } - - @Override - public Boolean requestCache() { - return requestCache; - } - - @Override - public boolean allowPartialSearchResults() { - return allowPartialSearchResults; - } - - @Override - public Scroll scroll() { - return scroll; - } - - @Override - public String[] indexRoutings() { - return indexRoutings; - } - - @Override - public String preference() { - return preference; - } - - @Override - public BytesReference cacheKey() throws IOException { - BytesStreamOutput out = new BytesStreamOutput(); - this.innerWriteTo(out, true); - // copy it over, most requests are small, we might as well copy to make sure we are not sliced... - // we could potentially keep it without copying, but then pay the price of extra unused bytes up to a page - return new BytesArray(out.bytes().toBytesRef(), true);// do a deep copy - } - - @Override - public String getClusterAlias() { - return clusterAlias; - } - - @Override - public Rewriteable getRewriteable() { - return new RequestRewritable(this); - } - - static class RequestRewritable implements Rewriteable { - - final ShardSearchRequest request; - - RequestRewritable(ShardSearchRequest request) { - this.request = request; - } - - @Override - public Rewriteable rewrite(QueryRewriteContext ctx) throws IOException { - SearchSourceBuilder newSource = request.source() == null ? 
null : Rewriteable.rewrite(request.source(), ctx); - AliasFilter newAliasFilter = Rewriteable.rewrite(request.getAliasFilter(), ctx); - if (newSource == request.source() && newAliasFilter == request.getAliasFilter()) { - return this; - } else { - request.source(newSource); - request.setAliasFilter(newAliasFilter); - return new RequestRewritable(request); - } - } - } -} diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index 0e9d5de9788f..2d736847e701 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -19,72 +19,313 @@ package org.elasticsearch.search.internal; +import org.elasticsearch.Version; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.AliasFilterParsingException; import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.transport.TransportRequest; import java.io.IOException; +import java.util.Arrays; +import java.util.Map; import java.util.function.Function; /** * Shard level request that represents a search. - * It provides all the methods that the {@link org.elasticsearch.search.internal.SearchContext} needs. + * It provides all the methods that the {@link SearchContext} needs. * Provides a cache key based on its content that can be used to cache shard level response. 
*/ -public interface ShardSearchRequest { +public class ShardSearchRequest extends TransportRequest implements IndicesRequest { + private final String clusterAlias; + private final ShardId shardId; + private final int numberOfShards; + private final SearchType searchType; + private final Scroll scroll; + private final float indexBoost; + private final Boolean requestCache; + private final long nowInMillis; + private final boolean allowPartialSearchResults; + private final String[] indexRoutings; + private final String preference; + private final OriginalIndices originalIndices; - ShardId shardId(); + //these are the only two mutable fields, as they are subject to rewriting + private AliasFilter aliasFilter; + private SearchSourceBuilder source; - SearchSourceBuilder source(); + public ShardSearchRequest(OriginalIndices originalIndices, + SearchRequest searchRequest, + ShardId shardId, + int numberOfShards, + AliasFilter aliasFilter, + float indexBoost, + long nowInMillis, + @Nullable String clusterAlias, + String[] indexRoutings) { + this(originalIndices, + shardId, + numberOfShards, + searchRequest.searchType(), + searchRequest.source(), + searchRequest.requestCache(), + aliasFilter, + indexBoost, + searchRequest.allowPartialSearchResults(), + indexRoutings, + searchRequest.preference(), + searchRequest.scroll(), + nowInMillis, + clusterAlias); + // If allowPartialSearchResults is unset (ie null), the cluster-level default should have been substituted + // at this stage. Any NPEs in the above are therefore an error in request preparation logic. + assert searchRequest.allowPartialSearchResults() != null; + } - AliasFilter getAliasFilter(); + public ShardSearchRequest(ShardId shardId, + long nowInMillis, + AliasFilter aliasFilter) { + this(OriginalIndices.NONE, shardId, -1, null, null, null, + aliasFilter, 1.0f, false, Strings.EMPTY_ARRAY, null, null, nowInMillis, null); + } - void setAliasFilter(AliasFilter filter); + private ShardSearchRequest(OriginalIndices originalIndices, + ShardId shardId, + int numberOfShards, + SearchType searchType, + SearchSourceBuilder source, + Boolean requestCache, + AliasFilter aliasFilter, + float indexBoost, + boolean allowPartialSearchResults, + String[] indexRoutings, + String preference, + Scroll scroll, + long nowInMillis, + @Nullable String clusterAlias) { + this.shardId = shardId; + this.numberOfShards = numberOfShards; + this.searchType = searchType; + this.source = source; + this.requestCache = requestCache; + this.aliasFilter = aliasFilter; + this.indexBoost = indexBoost; + this.allowPartialSearchResults = allowPartialSearchResults; + this.indexRoutings = indexRoutings; + this.preference = preference; + this.scroll = scroll; + this.nowInMillis = nowInMillis; + this.clusterAlias = clusterAlias; + this.originalIndices = originalIndices; + } - void source(SearchSourceBuilder source); + public ShardSearchRequest(StreamInput in) throws IOException { + super(in); + shardId = new ShardId(in); + searchType = SearchType.fromId(in.readByte()); + numberOfShards = in.readVInt(); + scroll = in.readOptionalWriteable(Scroll::new); + source = in.readOptionalWriteable(SearchSourceBuilder::new); + if (in.getVersion().before(Version.V_8_0_0)) { + // types no longer relevant so ignore + String[] types = in.readStringArray(); + if (types.length > 0) { + throw new IllegalStateException( + "types are no longer supported in search requests but found [" + Arrays.toString(types) + "]"); + } + } + aliasFilter = new AliasFilter(in); + indexBoost = in.readFloat(); + 
nowInMillis = in.readVLong(); + requestCache = in.readOptionalBoolean(); + clusterAlias = in.readOptionalString(); + allowPartialSearchResults = in.readBoolean(); + indexRoutings = in.readStringArray(); + preference = in.readOptionalString(); + originalIndices = OriginalIndices.readOriginalIndices(in); + } - int numberOfShards(); + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + innerWriteTo(out, false); + OriginalIndices.writeOriginalIndices(originalIndices, out); + } - SearchType searchType(); + protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOException { + shardId.writeTo(out); + out.writeByte(searchType.id()); + if (!asKey) { + out.writeVInt(numberOfShards); + } + out.writeOptionalWriteable(scroll); + out.writeOptionalWriteable(source); + if (out.getVersion().before(Version.V_8_0_0)) { + // types not supported so send an empty array to previous versions + out.writeStringArray(Strings.EMPTY_ARRAY); + } + aliasFilter.writeTo(out); + out.writeFloat(indexBoost); + if (asKey == false) { + out.writeVLong(nowInMillis); + } + out.writeOptionalBoolean(requestCache); + out.writeOptionalString(clusterAlias); + out.writeBoolean(allowPartialSearchResults); + if (asKey == false) { + out.writeStringArray(indexRoutings); + out.writeOptionalString(preference); + } + } - float indexBoost(); + @Override + public String[] indices() { + if (originalIndices == null) { + return null; + } + return originalIndices.indices(); + } - long nowInMillis(); + @Override + public IndicesOptions indicesOptions() { + if (originalIndices == null) { + return null; + } + return originalIndices.indicesOptions(); + } - Boolean requestCache(); + public ShardId shardId() { + return shardId; + } - boolean allowPartialSearchResults(); + public SearchSourceBuilder source() { + return source; + } - Scroll scroll(); + public AliasFilter getAliasFilter() { + return aliasFilter; + } - /** - * Returns the routing values resolved by the coordinating node for the index pointed by {@link #shardId()}. - */ - String[] indexRoutings(); + public void setAliasFilter(AliasFilter aliasFilter) { + this.aliasFilter = aliasFilter; + } - /** - * Returns the preference of the original {@link SearchRequest#preference()}. - */ - String preference(); + public void source(SearchSourceBuilder source) { + this.source = source; + } + + public int numberOfShards() { + return numberOfShards; + } + + public SearchType searchType() { + return searchType; + } + + public float indexBoost() { + return indexBoost; + } + + public long nowInMillis() { + return nowInMillis; + } + + public Boolean requestCache() { + return requestCache; + } + + public boolean allowPartialSearchResults() { + return allowPartialSearchResults; + } + + public Scroll scroll() { + return scroll; + } + + public String[] indexRoutings() { + return indexRoutings; + } + + public String preference() { + return preference; + } /** * Returns the cache key for this shard search request, based on its content */ - BytesReference cacheKey() throws IOException; + public BytesReference cacheKey() throws IOException { + BytesStreamOutput out = new BytesStreamOutput(); + this.innerWriteTo(out, true); + // copy it over, most requests are small, we might as well copy to make sure we are not sliced... 
+ // we could potentially keep it without copying, but then pay the price of extra unused bytes up to a page + return new BytesArray(out.bytes().toBytesRef(), true);// do a deep copy + } + + public String getClusterAlias() { + return clusterAlias; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new SearchTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String getDescription() { + // Shard id is enough here, the request itself can be found by looking at the parent task description + return "shardId[" + shardId() + "]"; + } + + public Rewriteable getRewriteable() { + return new RequestRewritable(this); + } + + static class RequestRewritable implements Rewriteable { + + final ShardSearchRequest request; + + RequestRewritable(ShardSearchRequest request) { + this.request = request; + } + + @Override + public Rewriteable rewrite(QueryRewriteContext ctx) throws IOException { + SearchSourceBuilder newSource = request.source() == null ? null : Rewriteable.rewrite(request.source(), ctx); + AliasFilter newAliasFilter = Rewriteable.rewrite(request.getAliasFilter(), ctx); + if (newSource == request.source() && newAliasFilter == request.getAliasFilter()) { + return this; + } else { + request.source(newSource); + request.setAliasFilter(newAliasFilter); + return new RequestRewritable(request); + } + } + } /** * Returns the filter associated with listed filtering aliases. @@ -92,8 +333,8 @@ public interface ShardSearchRequest { * The list of filtering aliases should be obtained by calling MetaData.filteringAliases. * Returns {@code null} if no filtering is required.
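The asKey flag threaded through innerWriteTo above is what makes cacheKey() useful: fields that vary between otherwise identical requests (numberOfShards, nowInMillis, indexRoutings, preference) are skipped when serializing for the request cache, so equal searches produce equal keys. A self-contained toy model of that idea (plain Java, not Elasticsearch code):

    // Toy model only: one writer serves both the wire format and the cache
    // key; the cache key simply omits fields that would make every request
    // unique.
    record SketchRequest(String source, long nowInMillis) {
        private void writeTo(StringBuilder out, boolean asKey) {
            out.append(source);
            if (asKey == false) {
                out.append('|').append(nowInMillis); // per-request noise, excluded from keys
            }
        }
        String cacheKey() {
            StringBuilder out = new StringBuilder();
            writeTo(out, true);
            return out.toString();
        }
    }

Two requests differing only in nowInMillis therefore produce the same cache key, which is what lets the shard request cache serve both.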

    */ - static QueryBuilder parseAliasFilter(CheckedFunction filterParser, - IndexMetaData metaData, String... aliasNames) { + public static QueryBuilder parseAliasFilter(CheckedFunction filterParser, + IndexMetaData metaData, String... aliasNames) { if (aliasNames == null || aliasNames.length == 0) { return null; } @@ -137,13 +378,4 @@ public interface ShardSearchRequest { return combined; } } - - /** - * Returns the cluster alias in case the request is part of a cross-cluster search request, null otherwise. - */ - @Nullable - String getClusterAlias(); - - Rewriteable getRewriteable(); - } diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java deleted file mode 100644 index 0cb7b7401bfb..000000000000 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.internal; - -import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchTask; -import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.query.Rewriteable; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.transport.TransportRequest; - -import java.io.IOException; -import java.util.Map; - -/** - * Shard level search request that represents an actual search sent from the coordinating node to the nodes holding - * the shards where the query needs to be executed. Holds the same info as {@link org.elasticsearch.search.internal.ShardSearchLocalRequest} - * but gets sent over the transport and holds also the indices coming from the original request that generated it, plus its headers and - * context. 
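parseAliasFilter, promoted above from an interface method to a public static method on the class, resolves each alias name to its filter and ORs multiple filters together. (Its first parameter's generic type appears stripped by extraction here; CheckedFunction<byte[], QueryBuilder, IOException> is the likely original.) A hedged sketch of the multi-alias combination, assuming the usual "match any alias filter" semantics rather than copying the method body:

    import java.util.List;
    import org.elasticsearch.index.query.BoolQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilder;

    final class AliasFilters {
        // Assumed combining rule: a document matches the shard request if it
        // matches any one of the alias filters.
        static QueryBuilder combine(List<QueryBuilder> parsedFilters) {
            if (parsedFilters.size() == 1) {
                return parsedFilters.get(0);
            }
            BoolQueryBuilder combined = new BoolQueryBuilder();
            parsedFilters.forEach(combined::should);
            return combined;
        }
    }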
- */ -public class ShardSearchTransportRequest extends TransportRequest implements ShardSearchRequest, IndicesRequest { - - private final OriginalIndices originalIndices; - private final ShardSearchLocalRequest shardSearchLocalRequest; - - public ShardSearchTransportRequest(OriginalIndices originalIndices, SearchRequest searchRequest, ShardId shardId, int numberOfShards, - AliasFilter aliasFilter, float indexBoost, long nowInMillis, - @Nullable String clusterAlias, String[] indexRoutings) { - this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardId, numberOfShards, aliasFilter, indexBoost, - nowInMillis, clusterAlias, indexRoutings); - this.originalIndices = originalIndices; - } - - public ShardSearchTransportRequest(StreamInput in) throws IOException { - super(in); - shardSearchLocalRequest = new ShardSearchLocalRequest(in); - originalIndices = OriginalIndices.readOriginalIndices(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - shardSearchLocalRequest.innerWriteTo(out, false); - OriginalIndices.writeOriginalIndices(originalIndices, out); - } - - @Override - public String[] indices() { - if (originalIndices == null) { - return null; - } - return originalIndices.indices(); - } - - @Override - public IndicesOptions indicesOptions() { - if (originalIndices == null) { - return null; - } - return originalIndices.indicesOptions(); - } - - @Override - public ShardId shardId() { - return shardSearchLocalRequest.shardId(); - } - - @Override - public SearchSourceBuilder source() { - return shardSearchLocalRequest.source(); - } - - @Override - public AliasFilter getAliasFilter() { - return shardSearchLocalRequest.getAliasFilter(); - } - - @Override - public void setAliasFilter(AliasFilter filter) { - shardSearchLocalRequest.setAliasFilter(filter); - } - - @Override - public void source(SearchSourceBuilder source) { - shardSearchLocalRequest.source(source); - } - - @Override - public int numberOfShards() { - return shardSearchLocalRequest.numberOfShards(); - } - - @Override - public SearchType searchType() { - return shardSearchLocalRequest.searchType(); - } - - @Override - public float indexBoost() { - return shardSearchLocalRequest.indexBoost(); - } - - @Override - public long nowInMillis() { - return shardSearchLocalRequest.nowInMillis(); - } - - @Override - public Boolean requestCache() { - return shardSearchLocalRequest.requestCache(); - } - - @Override - public boolean allowPartialSearchResults() { - return shardSearchLocalRequest.allowPartialSearchResults(); - } - - @Override - public Scroll scroll() { - return shardSearchLocalRequest.scroll(); - } - - @Override - public String[] indexRoutings() { - return shardSearchLocalRequest.indexRoutings(); - } - - @Override - public String preference() { - return shardSearchLocalRequest.preference(); - } - - @Override - public BytesReference cacheKey() throws IOException { - return shardSearchLocalRequest.cacheKey(); - } - - @Override - public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new SearchTask(id, type, action, getDescription(), parentTaskId, headers); - } - - @Override - public String getDescription() { - // Shard id is enough here, the request itself can be found by looking at the parent task description - return "shardId[" + shardSearchLocalRequest.shardId() + "]"; - } - - @Override - public String getClusterAlias() { - return shardSearchLocalRequest.getClusterAlias(); - } - - @Override - public Rewriteable 
getRewriteable() { - return shardSearchLocalRequest.getRewriteable(); - } -} diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 64621277f6e6..7f3a7a5b1b51 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -275,7 +275,7 @@ public class QueryPhase implements SearchPhase { if (searchContext.request().allowPartialSearchResults() == false) { // Can't rethrow TimeExceededException because not serializable - throw new QueryPhaseExecutionException(searchContext, "Time exceeded"); + throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Time exceeded"); } queryResult.searchTimedOut(true); } finally { @@ -302,7 +302,7 @@ public class QueryPhase implements SearchPhase { } return topDocsFactory.shouldRescore(); } catch (Exception e) { - throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e); + throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Failed to execute main query", e); } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java index 94d259ef5254..01a0bcde4ce9 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java @@ -20,22 +20,22 @@ package org.elasticsearch.search.query; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.SearchContextException; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.SearchException; +import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; -public class QueryPhaseExecutionException extends SearchContextException { +public class QueryPhaseExecutionException extends SearchException { - public QueryPhaseExecutionException(SearchContext context, String msg, Throwable cause) { - super(context, "Query Failed [" + msg + "]", cause); + public QueryPhaseExecutionException(SearchShardTarget shardTarget, String msg, Throwable cause) { + super(shardTarget, "Query Failed [" + msg + "]", cause); } public QueryPhaseExecutionException(StreamInput in) throws IOException { super(in); } - public QueryPhaseExecutionException(SearchContext context, String msg) { - super(context, msg); + public QueryPhaseExecutionException(SearchShardTarget shardTarget, String msg) { + super(shardTarget, msg); } } diff --git a/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java b/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java index 01924e938dcd..ff24fa75a2c0 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.slice; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -33,7 +32,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import 
org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -65,11 +63,9 @@ import java.util.Set; */ public class SliceBuilder implements Writeable, ToXContentObject { - private static final DeprecationLogger DEPRECATION_LOG = new DeprecationLogger(LogManager.getLogger(SliceBuilder.class)); - - public static final ParseField FIELD_FIELD = new ParseField("field"); + private static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField ID_FIELD = new ParseField("id"); - public static final ParseField MAX_FIELD = new ParseField("max"); + private static final ParseField MAX_FIELD = new ParseField("max"); private static final ObjectParser PARSER = new ObjectParser<>("slice", SliceBuilder::new); diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 949a5a3ff441..ede0616ccfe6 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -332,10 +332,7 @@ public class FieldSortBuilder extends SortBuilder { Nested nested = null; if (isUnmapped == false) { if (nestedSort != null) { - if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { - throw new QueryShardException(context, - "max_children is only supported on last level of nested sort"); - } + validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); nested = resolveNested(context, nestedSort); } else { validateMissingNestedPath(context, fieldName); @@ -362,6 +359,18 @@ public class FieldSortBuilder extends SortBuilder { return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null)); } + /** + * Throws an exception if max children is not located at top level nested sort. + */ + static void validateMaxChildrenExistOnlyInTopLevelNestedSort(QueryShardContext context, NestedSortBuilder nestedSort) { + for (NestedSortBuilder child = nestedSort.getNestedSort(); child != null; child = child.getNestedSort()) { + if (child.getMaxChildren() != Integer.MAX_VALUE) { + throw new QueryShardException(context, + "max_children is only supported on top level of nested sort"); + } + } + } + /** * Throws an exception if the provided field requires a nested context. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 630b93b4f34b..12b378cfd6c0 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -52,7 +52,6 @@ import org.elasticsearch.index.query.GeoValidationMethod; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; @@ -64,6 +63,7 @@ import java.util.Locale; import java.util.Objects; import static org.elasticsearch.search.sort.FieldSortBuilder.validateMissingNestedPath; +import static org.elasticsearch.search.sort.FieldSortBuilder.validateMaxChildrenExistOnlyInTopLevelNestedSort; import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; /** @@ -536,10 +536,7 @@ public class GeoDistanceSortBuilder extends SortBuilder Nested nested = null; if (nestedSort != null) { - if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { - throw new QueryShardException(context, - "max_children is only supported on last level of nested sort"); - } + validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); nested = resolveNested(context, nestedSort); } else { validateMissingNestedPath(context, fieldName); diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 17fed4d9ac19..d3fa3bb0a1fe 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -58,6 +58,7 @@ import java.util.Locale; import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.search.sort.FieldSortBuilder.validateMaxChildrenExistOnlyInTopLevelNestedSort; import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; /** @@ -242,10 +243,7 @@ public class ScriptSortBuilder extends SortBuilder { Nested nested = null; if (nestedSort != null) { - if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) { - throw new QueryShardException(context, - "max_children is only supported on last level of nested sort"); - } + validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort); nested = resolveNested(context, nestedSort); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index dc043db14360..9435df2177a5 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -244,7 +244,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements if (startedShards == null) { startedShards = new HashMap<>(); } - startedShards.put(shardId, IndexShardSnapshotStatus.newInitializing()); + startedShards.put(shardId, IndexShardSnapshotStatus.newInitializing(shardSnapshotStatus.generation())); } } if (startedShards != null && 
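The three sort builders above now share validateMaxChildrenExistOnlyInTopLevelNestedSort, which walks every child level of a nested sort and rejects max_children anywhere below the top. A sketch of what the validation accepts and rejects (the paths are illustrative; NestedSortBuilder's fluent setters are assumed from its public API):

    import org.elasticsearch.search.sort.NestedSortBuilder;

    class NestedSortExamples {
        // Accepted: max_children set on the top-level nested sort only.
        NestedSortBuilder ok = new NestedSortBuilder("offer")
            .setMaxChildren(10)
            .setNestedSort(new NestedSortBuilder("offer.items"));

        // Rejected by the loop above with
        // "max_children is only supported on top level of nested sort".
        NestedSortBuilder rejected = new NestedSortBuilder("offer")
            .setNestedSort(new NestedSortBuilder("offer.items").setMaxChildren(10));
    }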
startedShards.isEmpty() == false) { @@ -283,12 +283,15 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements assert indexId != null; snapshot(shardId, snapshot, indexId, snapshotStatus, new ActionListener<>() { @Override - public void onResponse(final Void aVoid) { + public void onResponse(String newGeneration) { + assert newGeneration != null; + assert newGeneration.equals(snapshotStatus.generation()); if (logger.isDebugEnabled()) { final IndexShardSnapshotStatus.Copy lastSnapshotStatus = snapshotStatus.asCopy(); - logger.debug("snapshot ({}) completed to {} with {}", snapshot, snapshot.getRepository(), lastSnapshotStatus); + logger.debug("snapshot [{}] completed to [{}] with [{}] at generation [{}]", + snapshot, snapshot.getRepository(), lastSnapshotStatus, snapshotStatus.generation()); } - notifySuccessfulSnapshotShard(snapshot, shardId); + notifySuccessfulSnapshotShard(snapshot, shardId, newGeneration); } @Override @@ -308,7 +311,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements * @param snapshotStatus snapshot status */ private void snapshot(final ShardId shardId, final Snapshot snapshot, final IndexId indexId, - final IndexShardSnapshotStatus snapshotStatus, ActionListener<Void> listener) { + final IndexShardSnapshotStatus snapshotStatus, ActionListener<String> listener) { try { final IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShardOrNull(shardId.id()); if (indexShard.routingEntry().primary() == false) { @@ -366,7 +369,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements // but we think the shard is done - we need to make new master know that the shard is done logger.debug("[{}] new master thinks the shard [{}] is not completed but the shard is done locally, " + "updating status on the master", snapshot.snapshot(), shardId); - notifySuccessfulSnapshotShard(snapshot.snapshot(), shardId); + notifySuccessfulSnapshotShard(snapshot.snapshot(), shardId, localShard.getValue().generation()); } else if (stage == Stage.FAILURE) { // but we think the shard failed - we need to make new master know that the shard failed @@ -436,15 +439,16 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements } /** Notify the master node that the given shard has been successfully snapshotted **/ - private void notifySuccessfulSnapshotShard(final Snapshot snapshot, final ShardId shardId) { + private void notifySuccessfulSnapshotShard(final Snapshot snapshot, final ShardId shardId, String generation) { + assert generation != null; sendSnapshotShardUpdate(snapshot, shardId, - new ShardSnapshotStatus(clusterService.localNode().getId(), ShardState.SUCCESS)); + new ShardSnapshotStatus(clusterService.localNode().getId(), ShardState.SUCCESS, generation)); } /** Notify the master node that the given shard failed to be snapshotted **/ private void notifyFailedSnapshotShard(final Snapshot snapshot, final ShardId shardId, final String failure) { sendSnapshotShardUpdate(snapshot, shardId, - new ShardSnapshotStatus(clusterService.localNode().getId(), ShardState.FAILED, failure)); + new ShardSnapshotStatus(clusterService.localNode().getId(), ShardState.FAILED, failure, null)); } /** Updates the shard snapshot status by sending a {@link UpdateIndexShardSnapshotStatusRequest} to the master node */ diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index
ecce50093cea..aa8193c92f22 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -123,6 +123,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus */ public static final Version NO_REPO_INITIALIZE_VERSION = Version.V_7_5_0; + public static final Version SHARD_GEN_IN_REPO_DATA_VERSION = Version.V_8_0_0; + private static final Logger logger = LogManager.getLogger(SnapshotsService.class); private final ClusterService clusterService; @@ -803,7 +805,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus logger.warn("failing snapshot of shard [{}] on closed node [{}]", shardEntry.key, shardStatus.nodeId()); shards.put(shardEntry.key, - new ShardSnapshotStatus(shardStatus.nodeId(), ShardState.FAILED, "node shutdown")); + new ShardSnapshotStatus(shardStatus.nodeId(), ShardState.FAILED, "node shutdown", + shardStatus.generation())); } } } @@ -908,7 +911,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus // Shard that we were waiting for has started on a node, let's process it snapshotChanged = true; logger.trace("starting shard that we were waiting for [{}] on node [{}]", shardId, shardStatus.nodeId()); - shards.put(shardId, new ShardSnapshotStatus(shardRouting.primaryShard().currentNodeId())); + shards.put(shardId, + new ShardSnapshotStatus(shardRouting.primaryShard().currentNodeId(), shardStatus.generation())); continue; } else if (shardRouting.primaryShard().initializing() || shardRouting.primaryShard().relocating()) { // Shard that we were waiting for hasn't started yet or still relocating - will continue to wait @@ -920,7 +924,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus // Shard that we were waiting for went into unassigned state or disappeared - giving up snapshotChanged = true; logger.warn("failing snapshot of shard [{}] on unassigned shard [{}]", shardId, shardStatus.nodeId()); - shards.put(shardId, new ShardSnapshotStatus(shardStatus.nodeId(), ShardState.FAILED, "shard is unassigned")); + shards.put(shardId, new ShardSnapshotStatus( + shardStatus.nodeId(), ShardState.FAILED, "shard is unassigned", shardStatus.generation())); } else { shards.put(shardId, shardStatus); } @@ -1224,7 +1229,8 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus for (ObjectObjectCursor shardEntry : snapshotEntry.shards()) { ShardSnapshotStatus status = shardEntry.value; if (status.state().completed() == false) { - status = new ShardSnapshotStatus(status.nodeId(), ShardState.ABORTED, "aborted by snapshot deletion"); + status = new ShardSnapshotStatus( + status.nodeId(), ShardState.ABORTED, "aborted by snapshot deletion", status.generation()); } shardsBuilder.put(shardEntry.key, status); } @@ -1410,8 +1416,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus IndexMetaData indexMetaData = metaData.index(indexName); if (indexMetaData == null) { // The index was deleted before we managed to start the snapshot - mark it as missing. 
- builder.put(new ShardId(indexName, IndexMetaData.INDEX_UUID_NA_VALUE, 0), - new SnapshotsInProgress.ShardSnapshotStatus(null, ShardState.MISSING, "missing index")); + builder.put(new ShardId(indexName, IndexMetaData.INDEX_UUID_NA_VALUE, 0), missingStatus(null, "missing index")); } else { IndexRoutingTable indexRoutingTable = clusterState.getRoutingTable().index(indexName); for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) { @@ -1419,20 +1424,18 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus if (indexRoutingTable != null) { ShardRouting primary = indexRoutingTable.shard(i).primaryShard(); if (primary == null || !primary.assignedToNode()) { - builder.put(shardId, - new SnapshotsInProgress.ShardSnapshotStatus(null, ShardState.MISSING, "primary shard is not allocated")); + builder.put(shardId, missingStatus(null, "primary shard is not allocated")); } else if (primary.relocating() || primary.initializing()) { - builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(primary.currentNodeId(), ShardState.WAITING)); + builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus( + primary.currentNodeId(), ShardState.WAITING, null)); } else if (!primary.started()) { - builder.put(shardId, - new SnapshotsInProgress.ShardSnapshotStatus(primary.currentNodeId(), ShardState.MISSING, - "primary shard hasn't been started yet")); + builder.put(shardId, missingStatus(primary.currentNodeId(), "primary shard hasn't been started yet")); } else { - builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(primary.currentNodeId())); + builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus( + primary.currentNodeId(), null)); } } else { - builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(null, ShardState.MISSING, - "missing routing table")); + builder.put(shardId, missingStatus(null, "missing routing table")); } } } @@ -1441,6 +1444,10 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus return builder.build(); } + private static ShardSnapshotStatus missingStatus(@Nullable String nodeId, String reason) { + return new SnapshotsInProgress.ShardSnapshotStatus(nodeId, ShardState.MISSING, reason, null); + } + /** * Returns the indices that are currently being snapshotted (with partial == false) and that are contained in the indices-to-check set. 
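Across the snapshot hunks above, ShardSnapshotStatus now carries the shard's repository generation so the master can track which blob generation each shard snapshot wrote; missingStatus centralizes the MISSING case, where no generation exists yet (null). A hedged sketch of the new constructor shapes, with arities taken from the hunks themselves (the wrapper class and method names are illustrative):

    import org.elasticsearch.cluster.SnapshotsInProgress;
    import org.elasticsearch.cluster.SnapshotsInProgress.ShardState;

    final class ShardStatusExamples {
        // Shard snapshot starting on a node; the state presumably defaults to INIT.
        static SnapshotsInProgress.ShardSnapshotStatus started(String nodeId, String generation) {
            return new SnapshotsInProgress.ShardSnapshotStatus(nodeId, generation);
        }

        // Terminal state with a reason keeps the last known generation.
        static SnapshotsInProgress.ShardSnapshotStatus aborted(String nodeId, String generation) {
            return new SnapshotsInProgress.ShardSnapshotStatus(
                nodeId, ShardState.ABORTED, "aborted by snapshot deletion", generation);
        }
    }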
*/ diff --git a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java index 570b1b604106..edf42cc4546b 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java +++ b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java @@ -19,8 +19,6 @@ package org.elasticsearch.threadpool; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; @@ -263,7 +261,6 @@ public interface Scheduler { * tasks to the uncaught exception handler */ class SafeScheduledThreadPoolExecutor extends ScheduledThreadPoolExecutor { - private static final Logger logger = LogManager.getLogger(SafeScheduledThreadPoolExecutor.class); @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors") public SafeScheduledThreadPoolExecutor(int corePoolSize, ThreadFactory threadFactory, RejectedExecutionHandler handler) { diff --git a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java index f8db0d96c541..c11afa088aa5 100644 --- a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java +++ b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java @@ -20,24 +20,19 @@ package org.elasticsearch.transport; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.KeyedLock; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.internal.io.IOUtils; import java.io.Closeable; -import java.util.ArrayList; -import java.util.Collections; import java.util.Iterator; -import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -53,8 +48,7 @@ public class ConnectionManager implements Closeable { private static final Logger logger = LogManager.getLogger(ConnectionManager.class); private final ConcurrentMap connectedNodes = ConcurrentCollections.newConcurrentMap(); - private final KeyedLock connectionLock = new KeyedLock<>(); // protects concurrent access to connectingNodes - private final Map>> connectingNodes = ConcurrentCollections.newConcurrentMap(); + private final ConcurrentMap> pendingConnections = ConcurrentCollections.newConcurrentMap(); private final AbstractRefCounted connectingRefCounter = new AbstractRefCounted("connection manager") { @Override protected void closeInternal() { @@ -122,89 +116,62 @@ public class ConnectionManager implements Closeable { return; } - try (Releasable lock = connectionLock.acquire(node.getId())) { - Transport.Connection connection = connectedNodes.get(node); - if (connection != null) { - assert 
connectingNodes.containsKey(node) == false; - lock.close(); - connectingRefCounter.decRef(); - listener.onResponse(null); - return; - } - - final List> connectionListeners = connectingNodes.computeIfAbsent(node, n -> new ArrayList<>()); - connectionListeners.add(listener); - if (connectionListeners.size() > 1) { - // wait on previous entry to complete connection attempt - connectingRefCounter.decRef(); - return; - } + if (connectedNodes.containsKey(node)) { + connectingRefCounter.decRef(); + listener.onResponse(null); + return; } - final RunOnce releaseOnce = new RunOnce(connectingRefCounter::decRef); + final ListenableFuture currentListener = new ListenableFuture<>(); + final ListenableFuture existingListener = pendingConnections.putIfAbsent(node, currentListener); + if (existingListener != null) { + try { + // wait on previous entry to complete connection attempt + existingListener.addListener(listener, EsExecutors.newDirectExecutorService()); + } finally { + connectingRefCounter.decRef(); + } + return; + } + currentListener.addListener(listener, EsExecutors.newDirectExecutorService()); + + final RunOnce releaseOnce = new RunOnce(connectingRefCounter::decRef); internalOpenConnection(node, resolvedProfile, ActionListener.wrap(conn -> { connectionValidator.validate(conn, resolvedProfile, ActionListener.wrap( ignored -> { assert Transports.assertNotTransportThread("connection validator success"); - boolean success = false; - List> listeners = null; try { - // we acquire a connection lock, so no way there is an existing connection - try (Releasable ignored2 = connectionLock.acquire(node.getId())) { - connectedNodes.put(node, conn); - if (logger.isDebugEnabled()) { - logger.debug("connected to node [{}]", node); - } + if (connectedNodes.putIfAbsent(node, conn) != null) { + logger.debug("existing connection to node [{}], closing new redundant connection", node); + IOUtils.closeWhileHandlingException(conn); + } else { + logger.debug("connected to node [{}]", node); try { - connectionListener.onNodeConnected(node); + connectionListener.onNodeConnected(node, conn); } finally { final Transport.Connection finalConnection = conn; conn.addCloseListener(ActionListener.wrap(() -> { logger.trace("unregistering {} after connection close and marking as disconnected", node); connectedNodes.remove(node, finalConnection); - connectionListener.onNodeDisconnected(node); + connectionListener.onNodeDisconnected(node, conn); })); } - if (conn.isClosed()) { - throw new NodeNotConnectedException(node, "connection concurrently closed"); - } - success = true; - listeners = connectingNodes.remove(node); } - } catch (ConnectTransportException e) { - throw e; - } catch (Exception e) { - throw new ConnectTransportException(node, "general node connection failure", e); } finally { - if (success == false) { // close the connection if there is a failure - logger.trace(() -> new ParameterizedMessage("failed to connect to [{}], cleaning dangling connections", node)); - IOUtils.closeWhileHandlingException(conn); - } else { - releaseOnce.run(); - ActionListener.onResponse(listeners, null); - } + ListenableFuture future = pendingConnections.remove(node); + assert future == currentListener : "Listener in pending map is different than the expected listener"; + releaseOnce.run(); + future.onResponse(null); } }, e -> { assert Transports.assertNotTransportThread("connection validator failure"); IOUtils.closeWhileHandlingException(conn); - final List> listeners; - try (Releasable ignored = connectionLock.acquire(node.getId())) { - 
listeners = connectingNodes.remove(node); - } - releaseOnce.run(); - ActionListener.onFailure(listeners, e); + failConnectionListeners(node, releaseOnce, e, currentListener); })); }, e -> { assert Transports.assertNotTransportThread("internalOpenConnection failure"); - final List> listeners; - try (Releasable ignored = connectionLock.acquire(node.getId())) { - listeners = connectingNodes.remove(node); - } - releaseOnce.run(); - if (listeners != null) { - ActionListener.onFailure(listeners, e); - } + failConnectionListeners(node, releaseOnce, e, currentListener); })); } @@ -249,13 +216,6 @@ public class ConnectionManager implements Closeable { return connectedNodes.size(); } - /** - * Returns the set of nodes this manager is connected to. - */ - public Set connectedNodes() { - return Collections.unmodifiableSet(connectedNodes.keySet()); - } - @Override public void close() { internalClose(true); @@ -296,6 +256,15 @@ public class ConnectionManager implements Closeable { })); } + private void failConnectionListeners(DiscoveryNode node, RunOnce releaseOnce, Exception e, ListenableFuture expectedListener) { + ListenableFuture future = pendingConnections.remove(node); + releaseOnce.run(); + if (future != null) { + assert future == expectedListener : "Listener in pending map is different than the expected listener"; + future.onFailure(e); + } + } + ConnectionProfile getConnectionProfile() { return defaultProfile; } @@ -305,16 +274,16 @@ public class ConnectionManager implements Closeable { private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); @Override - public void onNodeDisconnected(DiscoveryNode key) { + public void onNodeDisconnected(DiscoveryNode key, Transport.Connection connection) { for (TransportConnectionListener listener : listeners) { - listener.onNodeDisconnected(key); + listener.onNodeDisconnected(key, connection); } } @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { for (TransportConnectionListener listener : listeners) { - listener.onNodeConnected(node); + listener.onNodeConnected(node, connection); } } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 7574258c75b7..979d3bc6a1c2 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -23,7 +23,6 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.StepListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; @@ -53,7 +52,6 @@ import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; @@ -78,7 +76,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos private static final Logger logger = LogManager.getLogger(RemoteClusterConnection.class); private final TransportService transportService; - 
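The ConnectionManager rewrite above replaces the per-node KeyedLock and ad-hoc listener lists with a single ConcurrentMap of pending futures: the first caller whose putIfAbsent wins owns the connection attempt for that node, and every concurrent caller simply subscribes to the existing future and shares its outcome. A minimal sketch of the pattern, with the attempt run synchronously for brevity and with CompletableFuture standing in for Elasticsearch's internal ListenableFuture:

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    // Sketch of "at most one in-flight attempt per key"; not the actual
    // ConnectionManager code, which completes the future asynchronously.
    public class PendingAttempts<K> {

        private final ConcurrentMap<K, CompletableFuture<Void>> pending = new ConcurrentHashMap<>();

        public CompletableFuture<Void> run(K key, Runnable attempt) {
            CompletableFuture<Void> current = new CompletableFuture<>();
            CompletableFuture<Void> existing = pending.putIfAbsent(key, current);
            if (existing != null) {
                return existing; // another caller owns the attempt; share its result
            }
            try {
                attempt.run();
                current.complete(null);
            } catch (Exception e) {
                current.completeExceptionally(e);
            } finally {
                // only the owner removes its own future, mirroring the assert
                // on pendingConnections.remove(node) in the diff above
                CompletableFuture<Void> removed = pending.remove(key);
                assert removed == current;
            }
            return current;
        }
    }

Because the owner removes the future once the attempt completes, a later call for the same key starts a fresh attempt instead of observing a stale result.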
private final ConnectionManager connectionManager; + private final RemoteConnectionManager remoteConnectionManager; private final String clusterAlias; private final int maxNumRemoteConnections; private final Predicate nodePredicate; @@ -116,7 +114,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos this.maxNumRemoteConnections = maxNumRemoteConnections; this.nodePredicate = nodePredicate; this.clusterAlias = clusterAlias; - this.connectionManager = connectionManager; + this.remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); this.seedNodes = Collections.unmodifiableList(seedNodes); this.skipUnavailable = RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE .getConcreteSettingForNamespace(clusterAlias).get(settings); @@ -168,8 +166,8 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } @Override - public void onNodeDisconnected(DiscoveryNode node) { - if (connectionManager.size() < maxNumRemoteConnections) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { + if (remoteConnectionManager.size() < maxNumRemoteConnections) { // try to reconnect and fill up the slot of the disconnected node connectHandler.connect(ActionListener.wrap( ignore -> logger.trace("successfully connected after disconnect of {}", node), @@ -182,7 +180,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos * will invoke the listener immediately. */ void ensureConnected(ActionListener voidActionListener) { - if (connectionManager.size() == 0) { + if (remoteConnectionManager.size() == 0) { connectHandler.connect(voidActionListener); } else { voidActionListener.onResponse(null); @@ -211,8 +209,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos request.clear(); request.nodes(true); request.local(true); // run this on the node that gets the request it's as good as any other - final DiscoveryNode node = getAnyConnectedNode(); - Transport.Connection connection = connectionManager.getConnection(node); + Transport.Connection connection = remoteConnectionManager.getAnyRemoteConnection(); transportService.sendRequest(connection, ClusterStateAction.NAME, request, TransportRequestOptions.EMPTY, new TransportResponseHandler() { @@ -256,12 +253,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos * If such node is not connected, the returned connection will be a proxy connection that redirects to it. 
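The javadoc above describes the fallback this change centralizes: prefer a direct connection to the target node, otherwise tunnel through any connected node of the remote cluster. A conceptual sketch, using simplified hypothetical types rather than the real transport API:

    // Conn is a hypothetical stand-in for Transport.Connection.
    interface Conn {
        void sendRequest(String action, Object request);
    }

    final class Proxying {
        /** Prefer a direct connection; otherwise proxy through any connected node. */
        static Conn connectionFor(String targetNodeId, Conn direct, Conn anyConnected) {
            if (direct != null) {
                return direct;
            }
            // Re-address each request so the directly connected node forwards it to
            // the real target -- what ProxyConnection does via
            // TransportActionProxy.getProxyAction(...) and wrapRequest(...); the
            // literal action prefix here is illustrative only.
            return (action, request) -> anyConnected.sendRequest(
                "internal:transport/proxy/" + action, new Object[] { targetNodeId, request });
        }
    }

Callers can then treat proxied and direct connections uniformly, which is why getConnection(DiscoveryNode) below collapses to a single call.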
*/ Transport.Connection getConnection(DiscoveryNode remoteClusterNode) { - if (connectionManager.nodeConnected(remoteClusterNode)) { - return connectionManager.getConnection(remoteClusterNode); - } - DiscoveryNode discoveryNode = getAnyConnectedNode(); - Transport.Connection connection = connectionManager.getConnection(discoveryNode); - return new ProxyConnection(connection, remoteClusterNode); + return remoteConnectionManager.getRemoteConnection(remoteClusterNode); } private Predicate getRemoteClusterNamePredicate() { @@ -280,67 +272,19 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos }; } - - static final class ProxyConnection implements Transport.Connection { - private final Transport.Connection proxyConnection; - private final DiscoveryNode targetNode; - - private ProxyConnection(Transport.Connection proxyConnection, DiscoveryNode targetNode) { - this.proxyConnection = proxyConnection; - this.targetNode = targetNode; - } - - @Override - public DiscoveryNode getNode() { - return targetNode; - } - - @Override - public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) - throws IOException, TransportException { - proxyConnection.sendRequest(requestId, TransportActionProxy.getProxyAction(action), - TransportActionProxy.wrapRequest(targetNode, request), options); - } - - @Override - public void close() { - assert false: "proxy connections must not be closed"; - } - - @Override - public void addCloseListener(ActionListener listener) { - proxyConnection.addCloseListener(listener); - } - - @Override - public boolean isClosed() { - return proxyConnection.isClosed(); - } - - @Override - public Version getVersion() { - return proxyConnection.getVersion(); - } - } - Transport.Connection getConnection() { - return connectionManager.getConnection(getAnyConnectedNode()); + return remoteConnectionManager.getAnyRemoteConnection(); } @Override public void close() throws IOException { - IOUtils.close(connectHandler); - connectionManager.closeNoBlock(); + IOUtils.close(connectHandler, remoteConnectionManager); } public boolean isClosed() { return connectHandler.isClosed(); } - public String getProxyAddress() { - return proxyAddress; - } - public List>> getSeedNodes() { return seedNodes; } @@ -456,14 +400,14 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos final ConnectionProfile profile = ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG); final StepListener openConnectionStep = new StepListener<>(); try { - connectionManager.openConnection(seedNode, profile, openConnectionStep); + remoteConnectionManager.openConnection(seedNode, profile, openConnectionStep); } catch (Exception e) { onFailure.accept(e); } final StepListener handShakeStep = new StepListener<>(); openConnectionStep.whenComplete(connection -> { - ConnectionProfile connectionProfile = connectionManager.getConnectionProfile(); + ConnectionProfile connectionProfile = remoteConnectionManager.getConnectionManager().getConnectionProfile(); transportService.handshake(connection, connectionProfile.getHandshakeTimeout().millis(), getRemoteClusterNamePredicate(), handShakeStep); }, onFailure); @@ -472,8 +416,8 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos handShakeStep.whenComplete(handshakeResponse -> { final DiscoveryNode handshakeNode = maybeAddProxyAddress(proxyAddress, handshakeResponse.getDiscoveryNode()); - if (nodePredicate.test(handshakeNode) && 
connectionManager.size() < maxNumRemoteConnections) { - connectionManager.connectToNode(handshakeNode, null, + if (nodePredicate.test(handshakeNode) && remoteConnectionManager.size() < maxNumRemoteConnections) { + remoteConnectionManager.connectToNode(handshakeNode, null, transportService.connectionValidator(handshakeNode), fullConnectionStep); } else { fullConnectionStep.onResponse(null); @@ -565,8 +509,8 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos private void handleNodes(Iterator nodesIter) { while (nodesIter.hasNext()) { final DiscoveryNode node = maybeAddProxyAddress(proxyAddress, nodesIter.next()); - if (nodePredicate.test(node) && connectionManager.size() < maxNumRemoteConnections) { - connectionManager.connectToNode(node, null, + if (nodePredicate.test(node) && remoteConnectionManager.size() < maxNumRemoteConnections) { + remoteConnectionManager.connectToNode(node, null, transportService.connectionValidator(node), new ActionListener<>() { @Override public void onResponse(Void aVoid) { @@ -625,20 +569,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } boolean isNodeConnected(final DiscoveryNode node) { - return connectionManager.nodeConnected(node); - } - - private final AtomicLong nextNodeId = new AtomicLong(); - - DiscoveryNode getAnyConnectedNode() { - List nodes = new ArrayList<>(connectionManager.connectedNodes()); - if (nodes.isEmpty()) { - throw new NoSuchRemoteClusterException(clusterAlias); - } else { - long curr; - while ((curr = nextNodeId.incrementAndGet()) == Long.MIN_VALUE); - return nodes.get(Math.floorMod(curr, nodes.size())); - } + return remoteConnectionManager.getConnectionManager().nodeConnected(node); } /** @@ -655,7 +586,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } int getNumNodesConnected() { - return connectionManager.size(); + return remoteConnectionManager.size(); } private static ConnectionManager createConnectionManager(ConnectionProfile connectionProfile, TransportService transportService) { @@ -663,6 +594,6 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } ConnectionManager getConnectionManager() { - return connectionManager; + return remoteConnectionManager.getConnectionManager(); } } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java new file mode 100644 index 000000000000..813799cda86e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java @@ -0,0 +1,155 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.transport; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.node.DiscoveryNode; + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicLong; + +public class RemoteConnectionManager implements Closeable { + + private final String clusterAlias; + private final ConnectionManager connectionManager; + private final AtomicLong counter = new AtomicLong(); + private volatile List connections = Collections.emptyList(); + + RemoteConnectionManager(String clusterAlias, ConnectionManager connectionManager) { + this.clusterAlias = clusterAlias; + this.connectionManager = connectionManager; + this.connectionManager.addListener(new TransportConnectionListener() { + @Override + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { + addConnection(connection); + } + + @Override + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { + removeConnection(connection); + } + }); + } + + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile, + ConnectionManager.ConnectionValidator connectionValidator, + ActionListener listener) throws ConnectTransportException { + connectionManager.connectToNode(node, connectionProfile, connectionValidator, listener); + } + + public void openConnection(DiscoveryNode node, ConnectionProfile profile, ActionListener listener) { + connectionManager.openConnection(node, profile, listener); + } + + public Transport.Connection getRemoteConnection(DiscoveryNode node) { + try { + return connectionManager.getConnection(node); + } catch (NodeNotConnectedException e) { + return new ProxyConnection(getAnyRemoteConnection(), node); + } + } + + public Transport.Connection getAnyRemoteConnection() { + List localConnections = this.connections; + if (localConnections.isEmpty()) { + throw new NoSuchRemoteClusterException(clusterAlias); + } else { + long curr; + while ((curr = counter.incrementAndGet()) == Long.MIN_VALUE); + return localConnections.get(Math.floorMod(curr, localConnections.size())); + } + } + + public ConnectionManager getConnectionManager() { + return connectionManager; + } + + public int size() { + return connectionManager.size(); + } + + public void close() { + connectionManager.closeNoBlock(); + } + + private synchronized void addConnection(Transport.Connection addedConnection) { + ArrayList newConnections = new ArrayList<>(this.connections); + newConnections.add(addedConnection); + this.connections = Collections.unmodifiableList(newConnections); + } + + private synchronized void removeConnection(Transport.Connection removedConnection) { + int newSize = this.connections.size() - 1; + ArrayList newConnections = new ArrayList<>(newSize); + for (Transport.Connection connection : this.connections) { + if (connection.equals(removedConnection) == false) { + newConnections.add(connection); + } + } + assert newConnections.size() == newSize : "Expected connection count: " + newSize + ", Found: " + newConnections.size(); + this.connections = Collections.unmodifiableList(newConnections); + } + + static final class ProxyConnection implements Transport.Connection { + private final Transport.Connection connection; + private final DiscoveryNode targetNode; + + private ProxyConnection(Transport.Connection connection, DiscoveryNode targetNode) { + this.connection = connection; + 
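The new RemoteConnectionManager above keeps a volatile, immutable list of open connections that is rebuilt under synchronized on every connect and disconnect (copy-on-write), while getAnyRemoteConnection picks the next connection by incrementing an AtomicLong and taking Math.floorMod, so the index stays non-negative even after the counter wraps. A self-contained generic sketch of the same selection scheme:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.NoSuchElementException;
    import java.util.concurrent.atomic.AtomicLong;

    public class RoundRobin<T> {

        private final AtomicLong counter = new AtomicLong();
        private volatile List<T> items = Collections.emptyList();

        public synchronized void add(T item) {
            List<T> copy = new ArrayList<>(items);
            copy.add(item);
            items = Collections.unmodifiableList(copy); // swap in a fresh snapshot
        }

        public synchronized void remove(T item) {
            List<T> copy = new ArrayList<>(items);
            copy.remove(item);
            items = Collections.unmodifiableList(copy);
        }

        public T next() {
            List<T> snapshot = items; // one volatile read; no locking on the hot path
            if (snapshot.isEmpty()) {
                throw new NoSuchElementException("no items registered");
            }
            long curr;
            // skip Long.MIN_VALUE, mirroring the guard in getAnyRemoteConnection()
            while ((curr = counter.incrementAndGet()) == Long.MIN_VALUE);
            // Math.floorMod(long, int) requires Java 9+
            return snapshot.get(Math.floorMod(curr, snapshot.size()));
        }
    }

Reads are lock-free and writes happen only on connect/disconnect, which matches the access pattern of a remote-cluster connection pool.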
this.targetNode = targetNode; + } + + @Override + public DiscoveryNode getNode() { + return targetNode; + } + + @Override + public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) + throws IOException, TransportException { + connection.sendRequest(requestId, TransportActionProxy.getProxyAction(action), + TransportActionProxy.wrapRequest(targetNode, request), options); + } + + @Override + public void close() { + assert false: "proxy connections must not be closed"; + } + + @Override + public void addCloseListener(ActionListener listener) { + connection.addCloseListener(listener); + } + + @Override + public boolean isClosed() { + return connection.isClosed(); + } + + @Override + public Version getVersion() { + return connection.getVersion(); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java b/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java index c41a328637c2..f07f6b0417ad 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java @@ -43,10 +43,10 @@ public interface TransportConnectionListener { /** * Called once a node connection is opened and registered. */ - default void onNodeConnected(DiscoveryNode node) {} + default void onNodeConnected(DiscoveryNode node, Transport.Connection connection) {} /** * Called once a node connection is closed and unregistered. */ - default void onNodeDisconnected(DiscoveryNode node) {} + default void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) {} } diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 36fa8ec358c6..9ed826de87cd 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -57,7 +57,6 @@ import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.transport.RemoteTransportException; import java.io.EOFException; @@ -73,6 +72,7 @@ import java.util.Map; import static java.util.Collections.emptyList; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; +import static org.elasticsearch.test.TestSearchContext.SHARD_TARGET; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.hasItems; @@ -307,7 +307,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { "\"caused_by\":{\"type\":\"illegal_argument_exception\",\"reason\":\"foo\"}}"); } { - ElasticsearchException e = new SearchParseException(new TestSearchContext(null), "foo", new XContentLocation(1,0)); + ElasticsearchException e = new SearchParseException(SHARD_TARGET, "foo", new XContentLocation(1,0)); assertExceptionAsJson(e, "{\"type\":\"search_parse_exception\",\"reason\":\"foo\",\"line\":1,\"col\":0}"); } { @@ -927,7 +927,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { expected = new ElasticsearchException("Elasticsearch exception [type=parsing_exception, 
reason=Unknown identifier]"); break; case 2: - actual = new SearchParseException(new TestSearchContext(null), "Parse failure", new XContentLocation(12, 98)); + actual = new SearchParseException(SHARD_TARGET, "Parse failure", new XContentLocation(12, 98)); expected = new ElasticsearchException("Elasticsearch exception [type=search_parse_exception, reason=Parse failure]"); break; case 3: diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index d5f2186ddb00..648dd142eeca 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -79,13 +79,11 @@ import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotException; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInProgressException; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.ActionTransportException; @@ -121,6 +119,7 @@ import static java.lang.reflect.Modifier.isInterface; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.singleton; +import static org.elasticsearch.test.TestSearchContext.SHARD_TARGET; import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class ExceptionSerializationTests extends ESTestCase { @@ -388,12 +387,10 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testSearchParseException() throws IOException { - SearchContext ctx = new TestSearchContext(null); - SearchParseException ex = serialize(new SearchParseException(ctx, "foo", new XContentLocation(66, 666))); + SearchParseException ex = serialize(new SearchParseException(SHARD_TARGET, "foo", new XContentLocation(66, 666))); assertEquals("foo", ex.getMessage()); assertEquals(66, ex.getLineNumber()); assertEquals(666, ex.getColumnNumber()); - assertEquals(ctx.shardTarget(), ex.shard()); } public void testIllegalIndexShardStateException() throws IOException { @@ -789,7 +786,7 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(124, null); ids.put(125, TcpTransport.HttpRequestOnTransportException.class); ids.put(126, org.elasticsearch.index.mapper.MapperParsingException.class); - ids.put(127, org.elasticsearch.search.SearchContextException.class); + ids.put(127, null); // was org.elasticsearch.search.SearchContextException.class ids.put(128, org.elasticsearch.search.builder.SearchSourceBuilderException.class); ids.put(129, null); // was org.elasticsearch.index.engine.EngineClosedException.class ids.put(130, org.elasticsearch.action.NoShardAvailableActionException.class); diff --git a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 9d23ca48e81f..1c6d6c9932a7 100644 --- a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ 
b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -329,7 +329,7 @@ public class IndicesRequestIT extends ESIntegTestCase { String explainShardAction = ExplainAction.NAME + "[s]"; interceptTransportActions(explainShardAction); - ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "type", "id").query(QueryBuilders.matchAllQuery()); + ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "id").query(QueryBuilders.matchAllQuery()); internalCluster().coordOnlyNodeClient().explain(explainRequest).actionGet(); clearInterceptedActions(); diff --git a/server/src/test/java/org/elasticsearch/action/ShardValidateQueryRequestTests.java b/server/src/test/java/org/elasticsearch/action/ShardValidateQueryRequestTests.java index 6e402b083450..d537bdabece3 100644 --- a/server/src/test/java/org/elasticsearch/action/ShardValidateQueryRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/ShardValidateQueryRequestTests.java @@ -55,14 +55,12 @@ public class ShardValidateQueryRequestTests extends ESTestCase { validateQueryRequest.query(QueryBuilders.termQuery("field", "value")); validateQueryRequest.rewrite(true); validateQueryRequest.explain(false); - validateQueryRequest.types("type1", "type2"); ShardValidateQueryRequest request = new ShardValidateQueryRequest(new ShardId("index", "foobar", 1), - new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), new String[] {"alias0", "alias1"}), validateQueryRequest); + new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), "alias0", "alias1"), validateQueryRequest); request.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { ShardValidateQueryRequest readRequest = new ShardValidateQueryRequest(in); assertEquals(request.filteringAliases(), readRequest.filteringAliases()); - assertArrayEquals(request.types(), readRequest.types()); assertEquals(request.explain(), readRequest.explain()); assertEquals(request.query(), readRequest.query()); assertEquals(request.rewrite(), readRequest.rewrite()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index c84d914d6760..f79c0e48d015 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -167,7 +167,7 @@ public class CancellableTasksTests extends TaskManagerTestCase { // Simulate a job that takes forever to finish // Using periodic checks method to identify that the task was cancelled try { - awaitBusy(() -> { + waitUntil(() -> { if (((CancellableTask) task).isCancelled()) { throw new TaskCancelledException("Cancelled"); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 3c4b59ab7849..8797dd3962c8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -75,7 +75,7 @@ import java.util.Map; import java.util.Objects; import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; -import static 
org.elasticsearch.test.ESTestCase.awaitBusy; +import static org.elasticsearch.test.ESTestCase.waitUntil; /** * A plugin that adds a cancellable blocking test task of integration testing of the task manager. @@ -305,7 +305,7 @@ public class TestTaskPlugin extends Plugin implements ActionPlugin, NetworkPlugi logger.info("Test task started on the node {}", clusterService.localNode()); if (request.shouldBlock) { try { - awaitBusy(() -> { + waitUntil(() -> { if (((CancellableTask) task).isCancelled()) { throw new RuntimeException("Cancelled!"); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java index 2822a9661fd1..76f35bcdcc39 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java @@ -28,8 +28,9 @@ public class SnapshotStatsTests extends AbstractXContentTestCase @Override protected SnapshotStats createTestInstance() { - long startTime = randomNonNegativeLong(); - long time = randomNonNegativeLong(); + // Using less than half of Long.MAX_VALUE for random time values to avoid long overflow in tests that add the two time values + long startTime = randomLongBetween(0, Long.MAX_VALUE / 2 - 1); + long time = randomLongBetween(0, Long.MAX_VALUE / 2 - 1); int incrementalFileCount = randomIntBetween(0, Integer.MAX_VALUE); int totalFileCount = randomIntBetween(0, Integer.MAX_VALUE); int processedFileCount = randomIntBetween(0, Integer.MAX_VALUE); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index dfba68d364b0..8ef668c01b72 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.bulk; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.TransportBulkActionTookTests.Resolver; @@ -28,11 +29,17 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; +import org.elasticsearch.ingest.IngestService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; @@ -42,9 +49,12 @@ import org.junit.After; import org.junit.Before; import java.util.Collections; +import java.util.List; import java.util.concurrent.TimeUnit; import static 
org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class TransportBulkActionTests extends ESTestCase { @@ -154,4 +164,152 @@ public class TransportBulkActionTests extends ESTestCase { UpdateRequest badUpsertRequest = new UpdateRequest("index", "type", "id1"); assertNull(TransportBulkAction.getIndexWriteRequest(badUpsertRequest)); } + + public void testResolveRequiredOrDefaultPipelineDefaultPipeline() { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias").writeIndex(true).build()); + MetaData metaData = MetaData.builder().put(builder).build(); + + // index name matches with IDM: + IndexRequest indexRequest = new IndexRequest("idx"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("default-pipeline")); + + // alias name matches with IDM: + indexRequest = new IndexRequest("alias"); + result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("default-pipeline")); + + // index name matches with ITMD: + IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder("name1") + .patterns(List.of("id*")) + .settings(settings(Version.CURRENT).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")); + metaData = MetaData.builder().put(templateBuilder).build(); + indexRequest = new IndexRequest("idx"); + result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("default-pipeline")); + } + + public void testResolveRequiredOrDefaultPipelineRequiredPipeline() { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias").writeIndex(true).build()); + MetaData metaData = MetaData.builder().put(builder).build(); + + // index name matches with IDM: + IndexRequest indexRequest = new IndexRequest("idx"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("required-pipeline")); + + // alias name matches with IDM: + indexRequest = new IndexRequest("alias"); + result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("required-pipeline")); + + // index name matches with ITMD: + IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder("name1") + .patterns(List.of("id*")) + 
.settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")); + metaData = MetaData.builder().put(templateBuilder).build(); + indexRequest = new IndexRequest("idx"); + result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("required-pipeline")); + } + + public void testResolveRequiredOrDefaultAndRequiredPipeline() { + IndexTemplateMetaData.Builder builder1 = IndexTemplateMetaData.builder("name1") + .patterns(List.of("i*")) + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")); + IndexTemplateMetaData.Builder builder2 = IndexTemplateMetaData.builder("name2") + .patterns(List.of("id*")) + .settings(settings(Version.CURRENT).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")); + MetaData metaData = MetaData.builder().put(builder1).put(builder2).build(); + + IndexRequest indexRequest = new IndexRequest("idx"); + Exception e = expectThrows(IllegalArgumentException.class, + () -> TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData)); + assertThat(e.getMessage(), + equalTo("required pipeline [required-pipeline] and default pipeline [default-pipeline] can not both be set")); + } + + public void testResolveRequiredOrDefaultPipelineRequestPipeline() { + // no pipeline: + { + MetaData metaData = MetaData.builder().build(); + IndexRequest indexRequest = new IndexRequest("idx"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(false)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo(IngestService.NOOP_PIPELINE_NAME)); + } + + // request pipeline: + { + MetaData metaData = MetaData.builder().build(); + IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("request-pipeline")); + } + + // request pipeline with default pipeline: + { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0); + MetaData metaData = MetaData.builder().put(builder).build(); + IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("request-pipeline")); + } + + // request pipeline with required pipeline: + { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0); + MetaData metaData = MetaData.builder().put(builder).build(); + IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); + Exception e = expectThrows(IllegalArgumentException.class, + () -> 
TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData)); + assertThat(e.getMessage(), + equalTo("request pipeline [request-pipeline] can not override required pipeline [required-pipeline]")); + } + + // request pipeline set to required pipeline: + { + IndexMetaData.Builder builder = IndexMetaData.builder("idx") + .settings(settings(Version.CURRENT).put(IndexSettings.REQUIRED_PIPELINE.getKey(), "required-pipeline")) + .numberOfShards(1) + .numberOfReplicas(0); + MetaData metaData = MetaData.builder().put(builder).build(); + IndexRequest indexRequest = new IndexRequest("idx").setPipeline("required-pipeline").isPipelineResolved(true); + boolean result = TransportBulkAction.resolveRequiredOrDefaultPipeline(indexRequest, indexRequest, metaData); + assertThat(result, is(true)); + assertThat(indexRequest.isPipelineResolved(), is(true)); + assertThat(indexRequest.getPipeline(), equalTo("required-pipeline")); + } + } } diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java index 9447c927b2e2..b279bf948513 100644 --- a/server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java @@ -54,9 +54,9 @@ public class ExplainRequestTests extends ESTestCase { public void testSerialize() throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { - ExplainRequest request = new ExplainRequest("index", "type", "id"); + ExplainRequest request = new ExplainRequest("index", "id"); request.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1.*"}, new String[] {"field2.*"})); - request.filteringAlias(new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), new String[] {"alias0", "alias1"})); + request.filteringAlias(new AliasFilter(QueryBuilders.termQuery("filter_field", "value"), "alias0", "alias1")); request.preference("the_preference"); request.query(QueryBuilders.termQuery("field", "value")); request.storedFields(new String[] {"field1", "field2"}); @@ -76,7 +76,7 @@ public class ExplainRequestTests extends ESTestCase { public void testValidation() { { - final ExplainRequest request = new ExplainRequest("index4", "_doc", "0"); + final ExplainRequest request = new ExplainRequest("index4", "0"); request.query(QueryBuilders.termQuery("field", "value")); final ActionRequestValidationException validate = request.validate(); @@ -85,12 +85,12 @@ public class ExplainRequestTests extends ESTestCase { } { - final ExplainRequest request = new ExplainRequest("index4", randomBoolean() ? "" : null, randomBoolean() ? "" : null); + final ExplainRequest request = new ExplainRequest("index4", randomBoolean() ? 
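Taken together, the TransportBulkActionTests above pin down the precedence rules for ingest pipeline resolution. The sketch below is a condensed hypothetical resolver that satisfies those assertions; it is not the actual TransportBulkAction implementation, and it omits the isPipelineResolved short-circuit the last test exercises:

    final class PipelinePrecedence {
        static final String NOOP = "_none"; // IngestService.NOOP_PIPELINE_NAME

        static String resolve(String requestPipeline, String defaultPipeline, String requiredPipeline) {
            if (requiredPipeline != null && defaultPipeline != null) {
                throw new IllegalArgumentException("required pipeline [" + requiredPipeline
                    + "] and default pipeline [" + defaultPipeline + "] can not both be set");
            }
            if (requiredPipeline != null) {
                // a request pipeline may not override a required one; a request that
                // already names the required pipeline is the tolerated exception
                if (requestPipeline != null && requestPipeline.equals(requiredPipeline) == false) {
                    throw new IllegalArgumentException("request pipeline [" + requestPipeline
                        + "] can not override required pipeline [" + requiredPipeline + "]");
                }
                return requiredPipeline; // required always wins
            }
            if (requestPipeline != null) {
                return requestPipeline; // an explicit request pipeline beats the default
            }
            return defaultPipeline != null ? defaultPipeline : NOOP;
        }
    }

In short: required (from index settings or a matching template) beats everything, request beats default, and with none of the three set the no-op pipeline is used.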
"" : null); request.query(QueryBuilders.termQuery("field", "value")); final ActionRequestValidationException validate = request.validate(); assertThat(validate, not(nullValue())); - assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing")); + assertThat(validate.validationErrors(), hasItems("id is missing")); } } } diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java index 4f62c1375899..80d9fc44efae 100644 --- a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java @@ -58,7 +58,6 @@ public class ExplainResponseTests extends AbstractSerializingTestCase> getPlugins() { + return List.of(TestPlugin.class); + } + + public void testAsyncProcessorImplementation() { + // A pipeline with 2 processors: the test async processor and sync test processor. + BytesReference pipelineBody = new BytesArray("{\"processors\": [{\"test-async\": {}, \"test\": {}}]}"); + client().admin().cluster().putPipeline(new PutPipelineRequest("_id", pipelineBody, XContentType.JSON)).actionGet(); + + BulkRequest bulkRequest = new BulkRequest(); + int numDocs = randomIntBetween(8, 256); + for (int i = 0; i < numDocs; i++) { + bulkRequest.add(new IndexRequest("foobar") + .id(Integer.toString(i)) + .source("{}", XContentType.JSON) + .setPipeline("_id") + ); + } + BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); + assertThat(bulkResponse.getItems().length, equalTo(numDocs)); + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(i); + assertThat(bulkResponse.getItems()[i].getId(), equalTo(id)); + GetResponse getResponse = client().get(new GetRequest("foobar", id)).actionGet(); + // The expected result of async test processor: + assertThat(getResponse.getSource().get("foo"), equalTo("bar-" + id)); + // The expected result of sync test processor: + assertThat(getResponse.getSource().get("bar"), equalTo("baz-" + id)); + } + } + + public static class TestPlugin extends Plugin implements IngestPlugin { + + private ThreadPool threadPool; + + @Override + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, Environment environment, + NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + this.threadPool = threadPool; + return List.of(); + } + + @Override + public Map getProcessors(Processor.Parameters parameters) { + return Map.of( + "test-async", (factories, tag, config) -> { + return new AbstractProcessor(tag) { + + @Override + public void execute(IngestDocument ingestDocument, BiConsumer handler) { + threadPool.generic().execute(() -> { + String id = (String) ingestDocument.getSourceAndMetadata().get("_id"); + if (usually()) { + try { + Thread.sleep(10); + } catch (InterruptedException e) { + // ignore + } + } + ingestDocument.setFieldValue("foo", "bar-" + id); + handler.accept(ingestDocument, null); + }); + } + + @Override + public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + throw new UnsupportedOperationException(); + } + + @Override + public String getType() { + return "test-async"; + } + }; + }, + "test", (processorFactories, tag, config) -> { + return new AbstractProcessor(tag) { + @Override + public 
IngestDocument execute(IngestDocument ingestDocument) throws Exception { + String id = (String) ingestDocument.getSourceAndMetadata().get("_id"); + ingestDocument.setFieldValue("bar", "baz-" + id); + return ingestDocument; + } + + @Override + public String getType() { + return "test"; + } + }; + } + ); + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 174b164aead2..83484ab68706 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -146,7 +146,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); SearchShardIterator iterator = new SearchShardIterator(clusterAlias, new ShardId(new Index("name", "foo"), 1), Collections.emptyList(), new OriginalIndices(new String[] {"name", "name1"}, IndicesOptions.strictExpand())); - ShardSearchTransportRequest shardSearchTransportRequest = action.buildShardSearchRequest(iterator); + ShardSearchRequest shardSearchTransportRequest = action.buildShardSearchRequest(iterator); assertEquals(IndicesOptions.strictExpand(), shardSearchTransportRequest.indicesOptions()); assertArrayEquals(new String[] {"name", "name1"}, shardSearchTransportRequest.indices()); assertEquals(new MatchAllQueryBuilder(), shardSearchTransportRequest.getAliasFilter().getQueryBuilder()); diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 44fe3f92c615..8098459ce71a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -60,7 +60,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override - public void sendCanMatch(Transport.Connection connection, ShardSearchTransportRequest request, SearchTask task, + public void sendCanMatch(Transport.Connection connection, ShardSearchRequest request, SearchTask task, ActionListener listener) { new Thread(() -> listener.onResponse(new SearchService.CanMatchResponse(request.shardId().id() == 0 ? 
shard1 : shard2))).start(); @@ -117,7 +117,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { final boolean shard1 = randomBoolean(); SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override - public void sendCanMatch(Transport.Connection connection, ShardSearchTransportRequest request, SearchTask task, + public void sendCanMatch(Transport.Connection connection, ShardSearchRequest request, SearchTask task, ActionListener listener) { boolean throwException = request.shardId().id() != 0; if (throwException && randomBoolean()) { @@ -185,7 +185,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { @Override public void sendCanMatch( Transport.Connection connection, - ShardSearchTransportRequest request, + ShardSearchRequest request, SearchTask task, ActionListener listener) { listener.onResponse(new SearchService.CanMatchResponse(randomBoolean())); diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 4701290fb65b..80fed5b2a262 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.action.search; import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -51,8 +50,8 @@ public class ExpandSearchPhaseTests extends ESTestCase { final int numInnerHits = randomIntBetween(1, 5); List collapsedHits = new ArrayList<>(numInnerHits); for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { - SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(innerHitNum, "ID", new Text("type"), - Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", new Text("type"), + SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(innerHitNum, "ID", + Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", Collections.emptyMap())}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F); collapsedHits.add(hits); } @@ -101,7 +100,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { } }; - SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"), + SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); @@ -132,8 +131,8 @@ public class ExpandSearchPhaseTests extends ESTestCase { public void testFailOneItemFailsEntirePhase() throws IOException { AtomicBoolean executedMultiSearch = new AtomicBoolean(false); - SearchHits collapsedHits = new SearchHits(new SearchHit[]{new SearchHit(2, "ID", new Text("type"), - Collections.emptyMap()), new SearchHit(3, "ID", new Text("type"), + SearchHits collapsedHits = new SearchHits(new SearchHit[]{new SearchHit(2, "ID", + Collections.emptyMap()), new SearchHit(3, "ID", Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); 
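The SearchHit constructions in these tests drop the Text-wrapped type argument: in typeless form a test hit is built from docId, id, and document fields only, and `_type` disappears from the rendered JSON, as SearchResponseTests below now expects. A compact sketch mirroring the updated usage:

    import java.util.Collections;

    import org.apache.lucene.search.TotalHits;
    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.SearchHits;

    class TypelessHits {
        // mirrors the updated test usage: (docId, id, documentFields) -- no type argument
        static SearchHits singleHit() {
            SearchHit hit = new SearchHit(1, "id1", Collections.emptyMap());
            hit.score(2.0f);
            return new SearchHits(new SearchHit[] { hit },
                new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0f);
        }
    }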
MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); String collapseValue = randomBoolean() ? null : "boom"; @@ -155,9 +154,9 @@ public class ExpandSearchPhaseTests extends ESTestCase { } }; - SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"), + SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue)))), - new SearchHit(2, "ID2", new Text("type"), + new SearchHit(2, "ID2", Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); @@ -186,9 +185,9 @@ public class ExpandSearchPhaseTests extends ESTestCase { } }; - SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", new Text("type"), + SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null)))), - new SearchHit(2, "ID2", new Text("type"), + new SearchHit(2, "ID2", Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))))}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index 40c3ad0afc0d..f1314c100ebb 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import org.junit.Assert; @@ -111,7 +111,7 @@ public final class MockSearchPhaseContext implements SearchPhaseContext { } @Override - public ShardSearchTransportRequest buildShardSearchRequest(SearchShardIterator shardIt) { + public ShardSearchRequest buildShardSearchRequest(SearchShardIterator shardIt) { Assert.fail("should not be called"); return null; } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 3a1adf9748a0..142f972ba17e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -303,7 +303,7 @@ public class SearchPhaseControllerTests extends ESTestCase { List searchHits = new ArrayList<>(); for (ScoreDoc scoreDoc : mergedSearchDocs) { if (scoreDoc.shardIndex == shardIndex) { - searchHits.add(new SearchHit(scoreDoc.doc, "", new Text(""), Collections.emptyMap())); + searchHits.add(new SearchHit(scoreDoc.doc, "", Collections.emptyMap())); if (scoreDoc.score > maxScore) { maxScore = scoreDoc.score; } @@ -314,7 
+314,7 @@ public class SearchPhaseControllerTests extends ESTestCase { for (CompletionSuggestion.Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) { ScoreDoc doc = option.getDoc(); if (doc.shardIndex == shardIndex) { - searchHits.add(new SearchHit(doc.doc, "", new Text(""), Collections.emptyMap())); + searchHits.add(new SearchHit(doc.doc, "", Collections.emptyMap())); if (doc.score > maxScore) { maxScore = doc.score; } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index 16e34ffec3fd..2f437eb60b30 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentHelper; @@ -207,7 +206,7 @@ public class SearchResponseTests extends ESTestCase { } public void testToXContent() { - SearchHit hit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()); + SearchHit hit = new SearchHit(1, "id1", Collections.emptyMap()); hit.score(2.0f); SearchHit[] hits = new SearchHit[] { hit }; { @@ -233,7 +232,7 @@ public class SearchResponseTests extends ESTestCase { { expectedString.append("{\"total\":{\"value\":100,\"relation\":\"eq\"},"); expectedString.append("\"max_score\":1.5,"); - expectedString.append("\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":2.0}]}"); + expectedString.append("\"hits\":[{\"_id\":\"id1\",\"_score\":2.0}]}"); } } expectedString.append("}"); @@ -268,7 +267,7 @@ public class SearchResponseTests extends ESTestCase { { expectedString.append("{\"total\":{\"value\":100,\"relation\":\"eq\"},"); expectedString.append("\"max_score\":1.5,"); - expectedString.append("\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":2.0}]}"); + expectedString.append("\"hits\":[{\"_id\":\"id1\",\"_score\":2.0}]}"); } } expectedString.append("}"); diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 60078486335c..4054cc0355b4 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -492,7 +492,7 @@ public class TransportSearchActionTests extends ESTestCase { CountDownLatch disconnectedLatch = new CountDownLatch(numDisconnectedClusters); RemoteClusterServiceTests.addConnectionListener(remoteClusterService, new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { if (disconnectedNodes.remove(node)) { disconnectedLatch.countDown(); } @@ -662,7 +662,7 @@ public class TransportSearchActionTests extends ESTestCase { CountDownLatch disconnectedLatch = new CountDownLatch(numDisconnectedClusters); RemoteClusterServiceTests.addConnectionListener(remoteClusterService, new TransportConnectionListener() { @Override - 
diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
index 60078486335c..4054cc0355b4 100644
--- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
@@ -492,7 +492,7 @@ public class TransportSearchActionTests extends ESTestCase {
         CountDownLatch disconnectedLatch = new CountDownLatch(numDisconnectedClusters);
         RemoteClusterServiceTests.addConnectionListener(remoteClusterService, new TransportConnectionListener() {
             @Override
-            public void onNodeDisconnected(DiscoveryNode node) {
+            public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) {
                 if (disconnectedNodes.remove(node)) {
                     disconnectedLatch.countDown();
                 }
@@ -662,7 +662,7 @@ public class TransportSearchActionTests extends ESTestCase {
         CountDownLatch disconnectedLatch = new CountDownLatch(numDisconnectedClusters);
         RemoteClusterServiceTests.addConnectionListener(remoteClusterService, new TransportConnectionListener() {
             @Override
-            public void onNodeDisconnected(DiscoveryNode node) {
+            public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) {
                 if (disconnectedNodes.remove(node)) {
                     disconnectedLatch.countDown();
                 }
diff --git a/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
index a78bd4a67ece..44179569b314 100644
--- a/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
+++ b/server/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
@@ -128,10 +128,11 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
         Settings masterDataPathSettings = internalCluster().dataPathSettings(masterNode);
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(masterNode));

-        awaitBusy(() -> {
+        assertBusy(() -> {
             ClusterState clusterState = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
-            return clusterState.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID);
+            assertTrue(clusterState.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID));
         });
+        state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
         assertThat(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID), equalTo(true));

         // verify that both nodes are still in the cluster state but there is no master
diff --git a/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java
index 9e21a8f9f591..72db9e8f898a 100644
--- a/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java
+++ b/server/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java
@@ -219,11 +219,10 @@ public class NoMasterNodeIT extends ESIntegTestCase {

         final Client clientToMasterlessNode = client();

-        assertTrue(awaitBusy(() -> {
-                ClusterState state = clientToMasterlessNode.admin().cluster().prepareState().setLocal(true).get().getState();
-                return state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID);
-            }
-        ));
+        assertBusy(() -> {
+            ClusterState state = clientToMasterlessNode.admin().cluster().prepareState().setLocal(true).get().getState();
+            assertTrue(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID));
+        });

         GetResponse getResponse = clientToMasterlessNode.prepareGet("test1", "1").get();
         assertExists(getResponse);
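The `awaitBusy` to `assertBusy` conversions in the two ITs above follow one shape: rather than polling a boolean predicate and asserting the returned flag, the condition becomes an assertion inside the lambda, so a timeout reports the actual failed expectation instead of a bare `assertTrue` failure. A sketch of the pattern, reusing names from the hunks above:

    // Before: assertTrue(awaitBusy(() -> <boolean condition>));
    // After: assertBusy retries while the body throws AssertionError, and
    // rethrows the last assertion failure once the timeout elapses.
    assertBusy(() -> {
        ClusterState state = client().admin().cluster().prepareState().setLocal(true).get().getState();
        assertTrue(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID));
    });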
diff --git a/server/src/test/java/org/elasticsearch/cluster/SnapshotsInProgressTests.java b/server/src/test/java/org/elasticsearch/cluster/SnapshotsInProgressTests.java
index 087366d1188c..a64d146e5527 100644
--- a/server/src/test/java/org/elasticsearch/cluster/SnapshotsInProgressTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/SnapshotsInProgressTests.java
@@ -57,14 +57,14 @@ public class SnapshotsInProgressTests extends ESTestCase {
         ImmutableOpenMap.Builder<ShardId, ShardSnapshotStatus> shards = ImmutableOpenMap.builder();

         // test more than one waiting shard in an index
-        shards.put(new ShardId(idx1Name, idx1UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), ShardState.WAITING));
-        shards.put(new ShardId(idx1Name, idx1UUID, 1), new ShardSnapshotStatus(randomAlphaOfLength(2), ShardState.WAITING));
-        shards.put(new ShardId(idx1Name, idx1UUID, 2), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState(), ""));
+        shards.put(new ShardId(idx1Name, idx1UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), ShardState.WAITING, "1"));
+        shards.put(new ShardId(idx1Name, idx1UUID, 1), new ShardSnapshotStatus(randomAlphaOfLength(2), ShardState.WAITING, "1"));
+        shards.put(new ShardId(idx1Name, idx1UUID, 2), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState(), "", "1"));
         // test exactly one waiting shard in an index
-        shards.put(new ShardId(idx2Name, idx2UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), ShardState.WAITING));
-        shards.put(new ShardId(idx2Name, idx2UUID, 1), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState(), ""));
+        shards.put(new ShardId(idx2Name, idx2UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), ShardState.WAITING, "1"));
+        shards.put(new ShardId(idx2Name, idx2UUID, 1), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState(), "", "1"));
         // test no waiting shards in an index
-        shards.put(new ShardId(idx3Name, idx3UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState(), ""));
+        shards.put(new ShardId(idx3Name, idx3UUID, 0), new ShardSnapshotStatus(randomAlphaOfLength(2), randomNonWaitingState(), "", "1"));

         Entry entry = new Entry(snapshot, randomBoolean(), randomBoolean(), State.INIT, indices, System.currentTimeMillis(),
             randomLong(), shards.build(), SnapshotInfoTests.randomUserMetadata());
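The only change in this file is the extra trailing argument: each `ShardSnapshotStatus` now carries a shard generation (the literal `"1"` here), placed after the node id and state, or after the failure reason in the non-waiting overload. A sketch of the two call shapes, with `nodeId` as an illustrative placeholder:

    // Waiting shard: node id, shard state, generation.
    new ShardSnapshotStatus(nodeId, ShardState.WAITING, "1");
    // Failed or otherwise non-waiting shard: node id, state, reason, generation.
    new ShardSnapshotStatus(nodeId, randomNonWaitingState(), "", "1");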
diff --git a/server/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/server/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
index edcf4446dc2b..a528670ef000 100644
--- a/server/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
+++ b/server/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
@@ -38,9 +38,11 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;

 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;

 @ClusterScope(scope= ESIntegTestCase.Scope.TEST, numDataNodes =0, minNumDataNodes = 2)
@@ -78,40 +80,43 @@ public class AwarenessAllocationIT extends ESIntegTestCase {
         final String node3 = internalCluster().startNode(Settings.builder().put(commonSettings).put("node.attr.rack_id", "rack_2").build());

         // On slow machines the initial relocation might be delayed
-        assertThat(awaitBusy(
-            () -> {
-                logger.info("--> waiting for no relocation");
-                ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth()
-                    .setIndices("test1", "test2")
-                    .setWaitForEvents(Priority.LANGUID)
-                    .setWaitForGreenStatus()
-                    .setWaitForNodes("3")
-                    .setWaitForNoRelocatingShards(true)
-                    .get();
-                if (clusterHealth.isTimedOut()) {
-                    return false;
-                }
+        assertBusy(
+            () -> {
+                logger.info("--> waiting for no relocation");
+                ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth()
+                    .setIndices("test1", "test2")
+                    .setWaitForEvents(Priority.LANGUID)
+                    .setWaitForGreenStatus()
+                    .setWaitForNodes("3")
+                    .setWaitForNoRelocatingShards(true)
+                    .get();

-                logger.info("--> checking current state");
-                ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
-                // check that closed indices are effectively closed
-                if (indicesToClose.stream().anyMatch(index -> clusterState.metaData().index(index).getState() != State.CLOSE)) {
-                    return false;
-                }
-                // verify that we have all the primaries on node3
-                ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
-                for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
-                    for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
-                        for (ShardRouting shardRouting : indexShardRoutingTable) {
-                            counts.addTo(clusterState.nodes().get(shardRouting.currentNodeId()).getName(), 1);
-                        }
+                assertThat("Cluster health request timed out", clusterHealth.isTimedOut(), equalTo(false));
+
+                logger.info("--> checking current state");
+                ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
+
+                // check that closed indices are effectively closed
+                final List<String> notClosedIndices =
+                    indicesToClose.stream()
+                        .filter(index -> clusterState.metaData().index(index).getState() != State.CLOSE)
+                        .collect(Collectors.toList());
+                assertThat("Some indices not closed", notClosedIndices, empty());
+
+                // verify that we have all the primaries on node3
+                ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
+                for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
+                    for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
+                        for (ShardRouting shardRouting : indexShardRoutingTable) {
+                            counts.addTo(clusterState.nodes().get(shardRouting.currentNodeId()).getName(), 1);
                         }
                     }
-                return counts.get(node3) == totalPrimaries;
-            },
-            10,
-            TimeUnit.SECONDS
-        ), equalTo(true));
+                }
+                assertThat(counts.get(node3), equalTo(totalPrimaries));
+            },
+            10,
+            TimeUnit.SECONDS
+        );
     }

     public void testAwarenessZones() {
diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java
index 31298141e833..e4560d0613cc 100644
--- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java
@@ -585,6 +585,7 @@ public class CoordinatorTests extends AbstractCoordinatorTestCase {
         final ClusterNode follower0 = cluster.getAnyNodeExcept(leader);
         final ClusterNode follower1 = cluster.getAnyNodeExcept(leader, follower0);

+        follower0.allowClusterStateApplicationFailure();
         follower0.setClusterStateApplyResponse(ClusterStateApplyResponse.FAIL);
         AckCollector ackCollector = leader.submitValue(randomLong());
         cluster.stabilise(DEFAULT_CLUSTER_STATE_UPDATE_DELAY);
@@ -604,6 +605,7 @@ public class CoordinatorTests extends AbstractCoordinatorTestCase {
         final ClusterNode follower1 = cluster.getAnyNodeExcept(leader, follower0);
         final long startingTerm = leader.coordinator.getCurrentTerm();

+        leader.allowClusterStateApplicationFailure();
         leader.setClusterStateApplyResponse(ClusterStateApplyResponse.FAIL);
         AckCollector ackCollector = leader.submitValue(randomLong());
         cluster.runFor(DEFAULT_CLUSTER_STATE_UPDATE_DELAY, "committing value");
@@ -1215,9 +1217,15 @@ public class CoordinatorTests extends AbstractCoordinatorTestCase {
             clusterNode.disconnect();
         }

-        cluster.runFor(defaultMillis(LEADER_CHECK_INTERVAL_SETTING) + defaultMillis(LEADER_CHECK_TIMEOUT_SETTING),
+        cluster.runFor(defaultMillis(LEADER_CHECK_TIMEOUT_SETTING) // to wait for any in-flight check to time out
+            + defaultMillis(LEADER_CHECK_INTERVAL_SETTING) // to wait for the next check to be sent
+            + 2 * DEFAULT_DELAY_VARIABILITY, // to send the failing check and receive the disconnection response
             "waiting for leader failure");

+        for (final ClusterNode clusterNode : cluster.clusterNodes) {
+            assertThat(clusterNode.getId() + " is CANDIDATE", clusterNode.coordinator.getMode(), is(CANDIDATE));
+        }
+
         for (int i = scaledRandomIntBetween(1, 10); i >= 0; i--) {
             final MockLogAppender mockLogAppender = new MockLogAppender();
             try {
diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java
index ce25d24bce6b..496a25e18026 100644
--- a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java
@@ -220,7 +220,7 @@ public class LeaderCheckerTests extends ESTestCase {
                     return;
                 }
                 assertThat(action, equalTo(LEADER_CHECK_ACTION_NAME));
-                assertTrue(node.equals(leader));
+                assertEquals(node, leader);
                 final Response response = responseHolder[0];

                 deterministicTaskQueue.scheduleNow(new Runnable() {
@@ -340,7 +340,7 @@ public class LeaderCheckerTests extends ESTestCase {
             assertFalse(handler.successfulResponseReceived);
             assertThat(handler.transportException.getRootCause(), instanceOf(CoordinationStateRejectedException.class));
             CoordinationStateRejectedException cause = (CoordinationStateRejectedException) handler.transportException.getRootCause();
-            assertThat(cause.getMessage(), equalTo("leader check from unknown node"));
+            assertThat(cause.getMessage(), equalTo("rejecting leader check since [" + otherNode + "] has been removed from the cluster"));
         }

         {
@@ -364,7 +364,8 @@ public class LeaderCheckerTests extends ESTestCase {
             assertFalse(handler.successfulResponseReceived);
             assertThat(handler.transportException.getRootCause(), instanceOf(CoordinationStateRejectedException.class));
             CoordinationStateRejectedException cause = (CoordinationStateRejectedException) handler.transportException.getRootCause();
-            assertThat(cause.getMessage(), equalTo("non-leader rejecting leader check"));
+            assertThat(cause.getMessage(),
+                equalTo("rejecting leader check from [" + otherNode + "] sent to a node that is no longer the master"));
         }
     }

@@ -397,7 +398,7 @@ public class LeaderCheckerTests extends ESTestCase {
     public void testLeaderCheckRequestEqualsHashcodeSerialization() {
         LeaderCheckRequest request = new LeaderCheckRequest(
             new DiscoveryNode(randomAlphaOfLength(10), buildNewFakeTransportAddress(), Version.CURRENT));
-        // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type
+        //noinspection RedundantCast since it is needed for some IDEs (specifically Eclipse 4.8.0) to infer the right type
         EqualsHashCodeTestUtils.checkEqualsAndHashCode(request,
             (CopyFunction<LeaderCheckRequest>) rq -> copyWriteable(rq, writableRegistry(), LeaderCheckRequest::new),
             rq -> new LeaderCheckRequest(new DiscoveryNode(randomAlphaOfLength(10), buildNewFakeTransportAddress(), Version.CURRENT)));
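The reworked `runFor` duration in `CoordinatorTests` is worth spelling out, since its three comments encode the worst case for deposing a disconnected leader. A sketch of the arithmetic, using the setting names from the hunk:

    // Worst case until every node notices the leader is gone:
    long waitMillis = defaultMillis(LEADER_CHECK_TIMEOUT_SETTING)   // an in-flight check times out
        + defaultMillis(LEADER_CHECK_INTERVAL_SETTING)              // the next check is scheduled
        + 2 * DEFAULT_DELAY_VARIABILITY;                            // send it, get the disconnect back
    cluster.runFor(waitMillis, "waiting for leader failure");

After that window every node can safely be asserted to be a `CANDIDATE`, which is exactly what the added loop checks.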
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java
index 893d1f58e8e3..946f4c45f388 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java
@@ -360,10 +360,6 @@ public class IndexCreationTaskTests extends ESTestCase {
             .numberOfReplicas(numReplicas);
     }

-    private Map<String, String> createCustom() {
-        return Collections.singletonMap("a", "b");
-    }
-
     private interface MetaDataBuilderConfigurator {
         void configure(IndexTemplateMetaData.Builder builder) throws IOException;
     }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java
index e128c7bc7192..ac180d9e80ae 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java
@@ -465,7 +465,7 @@ public class MetaDataIndexStateServiceTests extends ESTestCase {
         final ImmutableOpenMap.Builder<ShardId, SnapshotsInProgress.ShardSnapshotStatus> shardsBuilder = ImmutableOpenMap.builder();
         for (ShardRouting shardRouting : newState.routingTable().index(index).randomAllActiveShardsIt()) {
-            shardsBuilder.put(shardRouting.shardId(), new SnapshotsInProgress.ShardSnapshotStatus(shardRouting.currentNodeId()));
+            shardsBuilder.put(shardRouting.shardId(), new SnapshotsInProgress.ShardSnapshotStatus(shardRouting.currentNodeId(), "1"));
         }

         final Snapshot snapshot = new Snapshot(randomAlphaOfLength(10), new SnapshotId(randomAlphaOfLength(5), randomAlphaOfLength(5)));
diff --git a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java
index 6a0ca00111ee..ca35189d8b57 100644
--- a/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/service/ClusterApplierServiceTests.java
@@ -359,6 +359,7 @@ public class ClusterApplierServiceTests extends ESTestCase {
         clusterApplierService.addStateApplier(event -> {
             throw new RuntimeException("dummy exception");
         });
+        clusterApplierService.allowClusterStateApplicationFailure();

         CountDownLatch latch = new CountDownLatch(1);
         clusterApplierService.onNewClusterState("test", () -> ClusterState.builder(clusterApplierService.state()).build(),
@@ -387,6 +388,7 @@ public class ClusterApplierServiceTests extends ESTestCase {
         AtomicReference<Throwable> error = new AtomicReference<>();
         clusterApplierService.clusterSettings.addSettingsUpdateConsumer(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING,
             v -> {});
+        clusterApplierService.allowClusterStateApplicationFailure();

         CountDownLatch latch = new CountDownLatch(1);
         clusterApplierService.onNewClusterState("test", () -> ClusterState.builder(clusterApplierService.state())
@@ -497,6 +499,7 @@ public class ClusterApplierServiceTests extends ESTestCase {

         final ClusterSettings clusterSettings;
         volatile Long currentTimeOverride = null;
+        boolean applicationMayFail;

         TimedClusterApplierService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) {
             super("test_node", settings, clusterSettings, threadPool);
@@ -507,6 +510,15 @@ public class ClusterApplierServiceTests extends ESTestCase {
         protected long currentTimeInMillis() {
             return Objects.requireNonNullElseGet(currentTimeOverride, super::currentTimeInMillis);
         }
+
+        @Override
+        protected boolean applicationMayFail() {
+            return this.applicationMayFail;
+        }
+
+        void allowClusterStateApplicationFailure() {
+            this.applicationMayFail = true;
+        }
     }
 }
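The `ClusterApplierServiceTests` hunks show the opt-in pattern this change threads through the tests: cluster state application failures are now treated as fatal unless a test declares them expected by overriding `applicationMayFail()`. A condensed sketch of the test subclass above (constructor and timing fields elided):

    // Failures stay fatal by default; a test flips the flag before
    // installing an applier that deliberately throws.
    @Override
    protected boolean applicationMayFail() {
        return this.applicationMayFail;
    }

    void allowClusterStateApplicationFailure() {
        this.applicationMayFail = true;
    }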
b/server/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java
@@ -400,8 +400,8 @@ public class ClusterServiceIT extends ESIntegTestCase {
         block1.countDown();
         invoked2.await();

-        // whenever we test for no tasks, we need to awaitBusy since this is a live node
-        assertTrue(awaitBusy(() -> clusterService.getMasterService().pendingTasks().isEmpty()));
+        // whenever we test for no tasks, we need to wait since this is a live node
+        assertBusy(() -> assertTrue("Pending tasks not empty", clusterService.getMasterService().pendingTasks().isEmpty()));
         waitNoPendingTasksOnAll();

         final CountDownLatch block2 = new CountDownLatch(1);
diff --git a/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java b/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java
index 849cc1ad4efb..2c505a1b9216 100644
--- a/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java
+++ b/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java
@@ -102,6 +102,7 @@ public class ClusterShardLimitIT extends ESIntegTestCase {
         assertFalse(clusterState.getMetaData().hasIndex("should-fail"));
     }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/47107")
     public void testIndexCreationOverLimitFromTemplate() {
         int dataNodes = client().admin().cluster().prepareState().get().getState().getNodes().getDataNodes().size();
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java
index 7c5cc2bc8026..4c2fcfbb589b 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java
@@ -22,7 +22,6 @@ package org.elasticsearch.common.xcontent;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParseException;
-
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -1208,11 +1207,6 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         assertThat(e.getMessage(), containsString("Field name cannot be null"));
     }

-    private static void expectNonNullFormatterException(ThrowingRunnable runnable) {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, runnable);
-        assertThat(e.getMessage(), containsString("DateTimeFormatter cannot be null"));
-    }
-
     private static void expectObjectException(ThrowingRunnable runnable) {
         JsonGenerationException e = expectThrows(JsonGenerationException.class, runnable);
         assertThat(e.getMessage(), containsString("Current context not Object"));
diff --git a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java
index b4ead893846b..33397ae58028 100644
--- a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java
+++ b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java
@@ -30,7 +30,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes.Builder;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.TransportAddress;
-import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.discovery.PeerFinder.TransportAddressConnector;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.transport.CapturingTransport;
@@ -215,9 +214,11 @@ public class PeerFinderTests extends ESTestCase {
             = new ConnectionManager(settings, capturingTransport);
         StubbableConnectionManager connectionManager = new StubbableConnectionManager(innerConnectionManager, settings,
             capturingTransport);
-        connectionManager.setDefaultNodeConnectedBehavior(cm -> {
-            assertTrue(Sets.haveEmptyIntersection(connectedNodes, disconnectedNodes));
-            return connectedNodes;
+        connectionManager.setDefaultNodeConnectedBehavior((cm, discoveryNode) -> {
+            final boolean isConnected = connectedNodes.contains(discoveryNode);
+            final boolean isDisconnected = disconnectedNodes.contains(discoveryNode);
+            assert isConnected != isDisconnected : discoveryNode + ": isConnected=" + isConnected + ", isDisconnected=" + isDisconnected;
+            return isConnected;
         });
         connectionManager.setDefaultGetConnectionBehavior((cm, discoveryNode) -> capturingTransport.createConnection(discoveryNode));
         transportService = new TransportService(settings, capturingTransport, deterministicTaskQueue.getThreadPool(),
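The `PeerFinderTests` hunk replaces a stub that returned the whole connected set with a per-node predicate: `setDefaultNodeConnectedBehavior` now receives the queried node and answers for that node alone, while asserting the invariant that every tracked node sits in exactly one of the two sets. The contract, reduced to its core:

    // A tracked node must be connected XOR disconnected - never both, never neither.
    boolean isConnected = connectedNodes.contains(discoveryNode);
    boolean isDisconnected = disconnectedNodes.contains(discoveryNode);
    assert isConnected != isDisconnected : discoveryNode;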
diff --git a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
index 1f57c7a89bb6..00f2a676f47b 100644
--- a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
+++ b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
@@ -54,39 +54,36 @@ public class ExplainActionIT extends ESIntegTestCase {

         client().prepareIndex("test", "test", "1").setSource("field", "value1").get();

-        ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1")
+        ExplainResponse response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.matchAllQuery()).get();
         assertNotNull(response);
         assertFalse(response.isExists()); // not a match b/c not realtime
         assertThat(response.getIndex(), equalTo("test"));
-        assertThat(response.getType(), equalTo("test"));
         assertThat(response.getId(), equalTo("1"));
         assertFalse(response.isMatch()); // not a match b/c not realtime

         refresh();
-        response = client().prepareExplain(indexOrAlias(), "test", "1")
+        response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.matchAllQuery()).get();
         assertNotNull(response);
         assertTrue(response.isMatch());
         assertNotNull(response.getExplanation());
         assertTrue(response.getExplanation().isMatch());
         assertThat(response.getIndex(), equalTo("test"));
-        assertThat(response.getType(), equalTo("test"));
         assertThat(response.getId(), equalTo("1"));
         assertThat(response.getExplanation().getValue(), equalTo(1.0f));

-        response = client().prepareExplain(indexOrAlias(), "test", "1")
+        response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.termQuery("field", "value2")).get();
         assertNotNull(response);
         assertTrue(response.isExists());
         assertFalse(response.isMatch());
         assertThat(response.getIndex(), equalTo("test"));
-        assertThat(response.getType(), equalTo("test"));
         assertThat(response.getId(), equalTo("1"));
         assertNotNull(response.getExplanation());
         assertFalse(response.getExplanation().isMatch());

-        response = client().prepareExplain(indexOrAlias(), "test", "1")
+        response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.boolQuery()
                 .must(QueryBuilders.termQuery("field", "value1"))
                 .must(QueryBuilders.termQuery("field", "value2"))).get();
@@ -94,19 +91,17 @@ public class ExplainActionIT extends ESIntegTestCase {
         assertTrue(response.isExists());
         assertFalse(response.isMatch());
         assertThat(response.getIndex(), equalTo("test"));
-        assertThat(response.getType(), equalTo("test"));
         assertThat(response.getId(), equalTo("1"));
         assertNotNull(response.getExplanation());
         assertFalse(response.getExplanation().isMatch());
         assertThat(response.getExplanation().getDetails().length, equalTo(2));

-        response = client().prepareExplain(indexOrAlias(), "test", "2")
+        response = client().prepareExplain(indexOrAlias(), "2")
             .setQuery(QueryBuilders.matchAllQuery()).get();
         assertNotNull(response);
         assertFalse(response.isExists());
         assertFalse(response.isMatch());
         assertThat(response.getIndex(), equalTo("test"));
-        assertThat(response.getType(), equalTo("test"));
         assertThat(response.getId(), equalTo("2"));
     }

@@ -126,7 +121,7 @@ public class ExplainActionIT extends ESIntegTestCase {
                 .endObject()).get();
         refresh();

-        ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1")
+        ExplainResponse response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.matchAllQuery())
             .setStoredFields("obj1.field1").get();
         assertNotNull(response);
@@ -142,7 +137,7 @@ public class ExplainActionIT extends ESIntegTestCase {
         assertThat(response.getGetResult().isSourceEmpty(), equalTo(true));

         refresh();
-        response = client().prepareExplain(indexOrAlias(), "test", "1")
+        response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.matchAllQuery())
             .setStoredFields("obj1.field1").setFetchSource(true).get();
         assertNotNull(response);
@@ -157,13 +152,13 @@ public class ExplainActionIT extends ESIntegTestCase {
         assertThat(response.getGetResult().getFields().get("obj1.field1").getValue().toString(), equalTo("value1"));
         assertThat(response.getGetResult().isSourceEmpty(), equalTo(false));

-        response = client().prepareExplain(indexOrAlias(), "test", "1")
+        response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.matchAllQuery())
             .setStoredFields("obj1.field1", "obj1.field2").get();
         assertNotNull(response);
         assertTrue(response.isMatch());
-        String v1 = (String) response.getGetResult().field("obj1.field1").getValue();
-        String v2 = (String) response.getGetResult().field("obj1.field2").getValue();
+        String v1 = response.getGetResult().field("obj1.field1").getValue();
+        String v2 = response.getGetResult().field("obj1.field2").getValue();
         assertThat(v1, equalTo("value1"));
         assertThat(v2, equalTo("value2"));
     }
@@ -183,7 +178,7 @@ public class ExplainActionIT extends ESIntegTestCase {
                 .endObject()).get();
         refresh();

-        ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1")
+        ExplainResponse response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.matchAllQuery())
             .setFetchSource("obj1.field1", null).get();
         assertNotNull(response);
@@ -196,7 +191,7 @@ public class ExplainActionIT extends ESIntegTestCase {
         assertThat(response.getGetResult().getSource().size(), equalTo(1));
         assertThat(((Map) response.getGetResult().getSource().get("obj1")).get("field1").toString(), equalTo("value1"));

-        response = client().prepareExplain(indexOrAlias(), "test", "1")
+        response = client().prepareExplain(indexOrAlias(), "1")
             .setQuery(QueryBuilders.matchAllQuery())
             .setFetchSource(null, "obj1.field2").get();
         assertNotNull(response);
@@ -204,7 +199,7 @@ public class ExplainActionIT extends ESIntegTestCase {
         assertThat(((Map) response.getGetResult().getSource().get("obj1")).get("field1").toString(), equalTo("value1"));
     }

-    public void testExplainWithFilteredAlias() throws Exception {
+    public void testExplainWithFilteredAlias() {
         assertAcked(prepareCreate("test")
             .addMapping("test", "field2", "type=text")
             .addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))));
@@ -213,14 +208,14 @@ public class ExplainActionIT extends ESIntegTestCase {
         client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get();
         refresh();

-        ExplainResponse response = client().prepareExplain("alias1", "test", "1")
+        ExplainResponse response = client().prepareExplain("alias1", "1")
             .setQuery(QueryBuilders.matchAllQuery()).get();
         assertNotNull(response);
         assertTrue(response.isExists());
         assertFalse(response.isMatch());
     }

-    public void testExplainWithFilteredAliasFetchSource() throws Exception {
+    public void testExplainWithFilteredAliasFetchSource() {
         assertAcked(client().admin().indices().prepareCreate("test")
             .addMapping("test", "field2", "type=text")
             .addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))));
@@ -229,20 +224,19 @@ public class ExplainActionIT extends ESIntegTestCase {
         client().prepareIndex("test", "test", "1").setSource("field1", "value1", "field2", "value1").get();
         refresh();

-        ExplainResponse response = client().prepareExplain("alias1", "test", "1")
+        ExplainResponse response = client().prepareExplain("alias1", "1")
             .setQuery(QueryBuilders.matchAllQuery()).setFetchSource(true).get();

         assertNotNull(response);
         assertTrue(response.isExists());
         assertFalse(response.isMatch());
         assertThat(response.getIndex(), equalTo("test"));
-        assertThat(response.getType(), equalTo("test"));
         assertThat(response.getId(), equalTo("1"));
         assertThat(response.getGetResult(), notNullValue());
         assertThat(response.getGetResult().getIndex(), equalTo("test"));
         assertThat(response.getGetResult().getId(), equalTo("1"));
         assertThat(response.getGetResult().getSource(), notNullValue());
-        assertThat((String)response.getGetResult().getSource().get("field1"), equalTo("value1"));
+        assertThat(response.getGetResult().getSource().get("field1"), equalTo("value1"));
     }

     public void testExplainDateRangeInQueryString() {
@@ -256,7 +250,7 @@ public class ExplainActionIT extends ESIntegTestCase {

         refresh();

-        ExplainResponse explainResponse = client().prepareExplain("test", "type", "1")
+        ExplainResponse explainResponse = client().prepareExplain("test", "1")
             .setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get();
         assertThat(explainResponse.isExists(), equalTo(true));
         assertThat(explainResponse.isMatch(), equalTo(true));
diff --git a/server/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/server/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java
index 2a38942eb5a2..b8b29dc722df 100644
--- a/server/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java
+++ b/server/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java
@@ -328,7 +328,7 @@ public class AsyncShardFetchTests extends ESTestCase {
                 entry = simulations.get(nodeId);
                 if (entry == null) {
                     // we are simulating a master node switch, wait for it to not be null
-                    awaitBusy(() -> simulations.containsKey(nodeId));
+                    assertBusy(() -> assertTrue(simulations.containsKey(nodeId)));
                 }
                 assert entry != null;
                 entry.executeLatch.await();
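`ExplainActionIT` is the same typeless migration seen earlier, applied to the explain API: `prepareExplain` drops its type argument and `getType()` vanishes from the response assertions. A before/after sketch built from the calls above:

    // Before: client().prepareExplain(indexOrAlias(), "test", "1")...
    ExplainResponse response = client().prepareExplain(indexOrAlias(), "1")
        .setQuery(QueryBuilders.matchAllQuery())
        .get();
    assertThat(response.getIndex(), equalTo("test"));
    assertThat(response.getId(), equalTo("1"));
    // response.getType() is no longer part of the assertions.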
a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java
+++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java
@@ -24,6 +24,8 @@ import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.coordination.CoordinationMetaData;
 import org.elasticsearch.cluster.coordination.CoordinationMetaData.VotingConfigExclusion;
+import org.elasticsearch.cluster.coordination.CoordinationState;
+import org.elasticsearch.cluster.coordination.InMemoryPersistedState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.Manifest;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -35,10 +37,10 @@ import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.test.ESTestCase;

-import java.io.IOException;
 import java.util.Collections;

 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.not;

 public class GatewayMetaStatePersistedStateTests extends ESTestCase {
@@ -63,21 +65,23 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase {
         super.tearDown();
     }

-    private MockGatewayMetaState newGateway() {
-        final MockGatewayMetaState gateway = new MockGatewayMetaState(settings, nodeEnvironment, xContentRegistry(), localNode);
-        gateway.start();
-        return gateway;
+    private CoordinationState.PersistedState newGatewayPersistedState() {
+        final MockGatewayMetaState gateway = new MockGatewayMetaState(localNode);
+        gateway.start(settings, nodeEnvironment, xContentRegistry());
+        final CoordinationState.PersistedState persistedState = gateway.getPersistedState();
+        assertThat(persistedState, not(instanceOf(InMemoryPersistedState.class)));
+        return persistedState;
     }

-    private MockGatewayMetaState maybeNew(MockGatewayMetaState gateway) throws IOException {
+    private CoordinationState.PersistedState maybeNew(CoordinationState.PersistedState persistedState) {
         if (randomBoolean()) {
-            return newGateway();
+            return newGatewayPersistedState();
         }
-        return gateway;
+        return persistedState;
     }

-    public void testInitialState() throws IOException {
-        MockGatewayMetaState gateway = newGateway();
+    public void testInitialState() {
+        CoordinationState.PersistedState gateway = newGatewayPersistedState();
         ClusterState state = gateway.getLastAcceptedState();
         assertThat(state.getClusterName(), equalTo(clusterName));
         assertTrue(MetaData.isGlobalStateEquals(state.metaData(), MetaData.EMPTY_META_DATA));
@@ -88,8 +92,8 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase {
         assertThat(currentTerm, equalTo(Manifest.empty().getCurrentTerm()));
     }

-    public void testSetCurrentTerm() throws IOException {
-        MockGatewayMetaState gateway = newGateway();
+    public void testSetCurrentTerm() {
+        CoordinationState.PersistedState gateway = newGatewayPersistedState();

         for (int i = 0; i < randomIntBetween(1, 5); i++) {
             final long currentTerm = randomNonNegativeLong();
@@ -142,8 +146,8 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase {
         }
     }

-    public void testSetLastAcceptedState() throws IOException {
-        MockGatewayMetaState gateway = newGateway();
+    public void testSetLastAcceptedState() {
+        CoordinationState.PersistedState gateway = newGatewayPersistedState();
         final long term = randomNonNegativeLong();

         for (int i = 0; i < randomIntBetween(1, 5); i++) {
@@ -165,8 +169,8 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase {
         }
     }

-    public void testSetLastAcceptedStateTermChanged() throws IOException {
-        MockGatewayMetaState gateway = newGateway();
+    public void testSetLastAcceptedStateTermChanged() {
+        CoordinationState.PersistedState gateway = newGatewayPersistedState();

         final String indexName = randomAlphaOfLength(10);
         final int numberOfShards = randomIntBetween(1, 5);
@@ -178,7 +182,7 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase {
         gateway.setLastAcceptedState(state);

         gateway = maybeNew(gateway);
-        final long newTerm = randomValueOtherThan(term, () -> randomNonNegativeLong());
+        final long newTerm = randomValueOtherThan(term, ESTestCase::randomNonNegativeLong);
         final int newNumberOfShards = randomValueOtherThan(numberOfShards, () -> randomIntBetween(1,5));
         final IndexMetaData newIndexMetaData = createIndexMetaData(indexName, newNumberOfShards, version);
         final ClusterState newClusterState = createClusterState(randomNonNegativeLong(),
@@ -189,11 +193,11 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase {
         assertThat(gateway.getLastAcceptedState().metaData().index(indexName), equalTo(newIndexMetaData));
     }

-    public void testCurrentTermAndTermAreDifferent() throws IOException {
-        MockGatewayMetaState gateway = newGateway();
+    public void testCurrentTermAndTermAreDifferent() {
+        CoordinationState.PersistedState gateway = newGatewayPersistedState();

         long currentTerm = randomNonNegativeLong();
-        long term = randomValueOtherThan(currentTerm, () -> randomNonNegativeLong());
+        long term = randomValueOtherThan(currentTerm, ESTestCase::randomNonNegativeLong);

         gateway.setCurrentTerm(currentTerm);
         gateway.setLastAcceptedState(createClusterState(randomNonNegativeLong(),
@@ -204,8 +208,8 @@ public class GatewayMetaStatePersistedStateTests extends ESTestCase {
         assertThat(gateway.getLastAcceptedState().coordinationMetaData().term(), equalTo(term));
     }

-    public void testMarkAcceptedConfigAsCommitted() throws IOException {
-        MockGatewayMetaState gateway = newGateway();
+    public void testMarkAcceptedConfigAsCommitted() {
+        CoordinationState.PersistedState gateway = newGatewayPersistedState();

         //generate random coordinationMetaData with different lastAcceptedConfiguration and lastCommittedConfiguration
         CoordinationMetaData coordinationMetaData;
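The refactoring above retargets the tests from `MockGatewayMetaState` to the `CoordinationState.PersistedState` it exposes, so each test drives the persistence interface directly and `maybeNew` can transparently simulate a node restart. A usage sketch condensed from `testSetCurrentTerm`:

    CoordinationState.PersistedState state = newGatewayPersistedState();
    long term = randomNonNegativeLong();
    state.setCurrentTerm(term);
    state = maybeNew(state);  // randomly reload the persisted state from disk
    assertThat(state.getCurrentTerm(), equalTo(term));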
org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.index.Index; import org.elasticsearch.plugins.MetaDataUpgrader; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestCustomMetaData; -import org.mockito.ArgumentCaptor; -import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.mockito.Matchers.anyString; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; -import static org.mockito.Mockito.when; -public class GatewayMetaStateTests extends ESAllocationTestCase { - - private ClusterState noIndexClusterState(boolean masterEligible) { - MetaData metaData = MetaData.builder().build(); - RoutingTable routingTable = RoutingTable.builder().build(); - - return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) - .metaData(metaData) - .routingTable(routingTable) - .nodes(generateDiscoveryNodes(masterEligible)) - .build(); - } - - private ClusterState clusterStateWithUnassignedIndex(IndexMetaData indexMetaData, boolean masterEligible) { - MetaData metaData = MetaData.builder() - .put(indexMetaData, false) - .build(); - - RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaData.index("test")) - .build(); - - return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) - .metaData(metaData) - .routingTable(routingTable) - .nodes(generateDiscoveryNodes(masterEligible)) - .build(); - } - - private ClusterState clusterStateWithAssignedIndex(IndexMetaData indexMetaData, boolean masterEligible) { - AllocationService strategy = createAllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 100) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) - .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) - .build()); - - ClusterState oldClusterState = clusterStateWithUnassignedIndex(indexMetaData, masterEligible); - RoutingTable routingTable = strategy.reroute(oldClusterState, "reroute").routingTable(); - - MetaData metaDataNewClusterState = MetaData.builder() - .put(oldClusterState.metaData().index("test"), false) - .build(); - - return ClusterState.builder(oldClusterState).routingTable(routingTable) - .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); - } - - private ClusterState clusterStateWithClosedIndex(IndexMetaData indexMetaData, boolean masterEligible) { - ClusterState oldClusterState = clusterStateWithAssignedIndex(indexMetaData, masterEligible); - - MetaData 
metaDataNewClusterState = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).state(IndexMetaData.State.CLOSE) - .numberOfShards(5).numberOfReplicas(2)) - .version(oldClusterState.metaData().version() + 1) - .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaDataNewClusterState.index("test")) - .build(); - - return ClusterState.builder(oldClusterState).routingTable(routingTable) - .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); - } - - private ClusterState clusterStateWithJustOpenedIndex(IndexMetaData indexMetaData, boolean masterEligible) { - ClusterState oldClusterState = clusterStateWithClosedIndex(indexMetaData, masterEligible); - - MetaData metaDataNewClusterState = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).state(IndexMetaData.State.OPEN) - .numberOfShards(5).numberOfReplicas(2)) - .version(oldClusterState.metaData().version() + 1) - .build(); - - return ClusterState.builder(oldClusterState) - .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); - } - - private DiscoveryNodes.Builder generateDiscoveryNodes(boolean masterEligible) { - Set dataOnlyRoles = Set.of(DiscoveryNodeRole.DATA_ROLE); - return DiscoveryNodes.builder().add(newNode("node1", masterEligible ? MASTER_DATA_ROLES : dataOnlyRoles)) - .add(newNode("master_node", MASTER_DATA_ROLES)).localNodeId("node1").masterNodeId(masterEligible ? "node1" : "master_node"); - } - - private Set randomPrevWrittenIndices(IndexMetaData indexMetaData) { - if (randomBoolean()) { - return Collections.singleton(indexMetaData.getIndex()); - } else { - return Collections.emptySet(); - } - } - - private IndexMetaData createIndexMetaData(String name) { - return IndexMetaData.builder(name). - settings(settings(Version.CURRENT)). - numberOfShards(5). - numberOfReplicas(2). 
- build(); - } - - public void testGetRelevantIndicesWithUnassignedShardsOnMasterEligibleNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithUnassignedIndex(indexMetaData, true), - noIndexClusterState(true), - randomPrevWrittenIndices(indexMetaData)); - assertThat(indices.size(), equalTo(1)); - } - - public void testGetRelevantIndicesWithUnassignedShardsOnDataOnlyNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithUnassignedIndex(indexMetaData, false), - noIndexClusterState(false), - randomPrevWrittenIndices(indexMetaData)); - assertThat(indices.size(), equalTo(0)); - } - - public void testGetRelevantIndicesWithAssignedShards() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - boolean masterEligible = randomBoolean(); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithAssignedIndex(indexMetaData, masterEligible), - clusterStateWithUnassignedIndex(indexMetaData, masterEligible), - randomPrevWrittenIndices(indexMetaData)); - assertThat(indices.size(), equalTo(1)); - } - - public void testGetRelevantIndicesForClosedPrevWrittenIndexOnDataOnlyNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithClosedIndex(indexMetaData, false), - clusterStateWithAssignedIndex(indexMetaData, false), - Collections.singleton(indexMetaData.getIndex())); - assertThat(indices.size(), equalTo(1)); - } - - public void testGetRelevantIndicesForClosedPrevNotWrittenIndexOnDataOnlyNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithJustOpenedIndex(indexMetaData, false), - clusterStateWithClosedIndex(indexMetaData, false), - Collections.emptySet()); - assertThat(indices.size(), equalTo(0)); - } - - public void testGetRelevantIndicesForWasClosedPrevWrittenIndexOnDataOnlyNode() { - IndexMetaData indexMetaData = createIndexMetaData("test"); - Set indices = GatewayMetaState.getRelevantIndices( - clusterStateWithJustOpenedIndex(indexMetaData, false), - clusterStateWithClosedIndex(indexMetaData, false), - Collections.singleton(indexMetaData.getIndex())); - assertThat(indices.size(), equalTo(1)); - } - - public void testResolveStatesToBeWritten() throws WriteStateException { - Map indices = new HashMap<>(); - Set relevantIndices = new HashSet<>(); - - IndexMetaData removedIndex = createIndexMetaData("removed_index"); - indices.put(removedIndex.getIndex(), 1L); - - IndexMetaData versionChangedIndex = createIndexMetaData("version_changed_index"); - indices.put(versionChangedIndex.getIndex(), 2L); - relevantIndices.add(versionChangedIndex.getIndex()); - - IndexMetaData notChangedIndex = createIndexMetaData("not_changed_index"); - indices.put(notChangedIndex.getIndex(), 3L); - relevantIndices.add(notChangedIndex.getIndex()); - - IndexMetaData newIndex = createIndexMetaData("new_index"); - relevantIndices.add(newIndex.getIndex()); - - MetaData oldMetaData = MetaData.builder() - .put(removedIndex, false) - .put(versionChangedIndex, false) - .put(notChangedIndex, false) - .build(); - - MetaData newMetaData = MetaData.builder() - .put(versionChangedIndex, true) - .put(notChangedIndex, false) - .put(newIndex, false) - .build(); - - IndexMetaData newVersionChangedIndex = newMetaData.index(versionChangedIndex.getIndex()); - - List actions = - 
GatewayMetaState.resolveIndexMetaDataActions(indices, relevantIndices, oldMetaData, newMetaData); - - assertThat(actions, hasSize(3)); - - for (GatewayMetaState.IndexMetaDataAction action : actions) { - if (action instanceof GatewayMetaState.KeepPreviousGeneration) { - assertThat(action.getIndex(), equalTo(notChangedIndex.getIndex())); - GatewayMetaState.AtomicClusterStateWriter writer = mock(GatewayMetaState.AtomicClusterStateWriter.class); - assertThat(action.execute(writer), equalTo(3L)); - verifyZeroInteractions(writer); - } - if (action instanceof GatewayMetaState.WriteNewIndexMetaData) { - assertThat(action.getIndex(), equalTo(newIndex.getIndex())); - GatewayMetaState.AtomicClusterStateWriter writer = mock(GatewayMetaState.AtomicClusterStateWriter.class); - when(writer.writeIndex("freshly created", newIndex)).thenReturn(0L); - assertThat(action.execute(writer), equalTo(0L)); - } - if (action instanceof GatewayMetaState.WriteChangedIndexMetaData) { - assertThat(action.getIndex(), equalTo(newVersionChangedIndex.getIndex())); - GatewayMetaState.AtomicClusterStateWriter writer = mock(GatewayMetaState.AtomicClusterStateWriter.class); - when(writer.writeIndex(anyString(), eq(newVersionChangedIndex))).thenReturn(3L); - assertThat(action.execute(writer), equalTo(3L)); - ArgumentCaptor reason = ArgumentCaptor.forClass(String.class); - verify(writer).writeIndex(reason.capture(), eq(newVersionChangedIndex)); - assertThat(reason.getValue(), containsString(Long.toString(versionChangedIndex.getVersion()))); - assertThat(reason.getValue(), containsString(Long.toString(newVersionChangedIndex.getVersion()))); - } - } - } - - private static class MetaStateServiceWithFailures extends MetaStateService { - private final int invertedFailRate; - private boolean failRandomly; - - private MetaDataStateFormat wrap(MetaDataStateFormat format) { - return new MetaDataStateFormat(format.getPrefix()) { - @Override - public void toXContent(XContentBuilder builder, T state) throws IOException { - format.toXContent(builder, state); - } - - @Override - public T fromXContent(XContentParser parser) throws IOException { - return format.fromXContent(parser); - } - - @Override - protected Directory newDirectory(Path dir) { - MockDirectoryWrapper mock = newMockFSDirectory(dir); - if (failRandomly) { - MockDirectoryWrapper.Failure fail = new MockDirectoryWrapper.Failure() { - @Override - public void eval(MockDirectoryWrapper dir) throws IOException { - int r = randomIntBetween(0, invertedFailRate); - if (r == 0) { - throw new MockDirectoryWrapper.FakeIOException(); - } - } - }; - mock.failOn(fail); - } - closeAfterSuite(mock); - return mock; - } - }; - } - - MetaStateServiceWithFailures(int invertedFailRate, NodeEnvironment nodeEnv, NamedXContentRegistry namedXContentRegistry) { - super(nodeEnv, namedXContentRegistry); - META_DATA_FORMAT = wrap(MetaData.FORMAT); - INDEX_META_DATA_FORMAT = wrap(IndexMetaData.FORMAT); - MANIFEST_FORMAT = wrap(Manifest.FORMAT); - failRandomly = false; - this.invertedFailRate = invertedFailRate; - } - - void failRandomly() { - failRandomly = true; - } - - void noFailures() { - failRandomly = false; - } - } - - private boolean metaDataEquals(MetaData md1, MetaData md2) { - boolean equals = MetaData.isGlobalStateEquals(md1, md2); - - for (IndexMetaData imd : md1) { - IndexMetaData imd2 = md2.index(imd.getIndex()); - equals = equals && imd.equals(imd2); - } - - for (IndexMetaData imd : md2) { - IndexMetaData imd2 = md1.index(imd.getIndex()); - equals = equals && imd.equals(imd2); - } - return 
equals; - } - - private static MetaData randomMetaDataForTx() { - int settingNo = randomIntBetween(0, 10); - MetaData.Builder builder = MetaData.builder() - .persistentSettings(Settings.builder().put("setting" + settingNo, randomAlphaOfLength(5)).build()); - int numOfIndices = randomIntBetween(0, 3); - - for (int i = 0; i < numOfIndices; i++) { - int indexNo = randomIntBetween(0, 50); - IndexMetaData indexMetaData = IndexMetaData.builder("index" + indexNo).settings( - Settings.builder() - .put(IndexMetaData.SETTING_INDEX_UUID, "index" + indexNo) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .build() - ).build(); - builder.put(indexMetaData, false); - } - return builder.build(); - } - - public void testAtomicityWithFailures() throws IOException { - try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateServiceWithFailures metaStateService = - new MetaStateServiceWithFailures(randomIntBetween(100, 1000), env, xContentRegistry()); - - // We only guarantee atomicity of writes, if there is initial Manifest file - Manifest manifest = Manifest.empty(); - MetaData metaData = MetaData.EMPTY_META_DATA; - metaStateService.writeManifestAndCleanup("startup", Manifest.empty()); - long currentTerm = randomNonNegativeLong(); - long clusterStateVersion = randomNonNegativeLong(); - - metaStateService.failRandomly(); - Set possibleMetaData = new HashSet<>(); - possibleMetaData.add(metaData); - - for (int i = 0; i < randomIntBetween(1, 5); i++) { - GatewayMetaState.AtomicClusterStateWriter writer = - new GatewayMetaState.AtomicClusterStateWriter(metaStateService, manifest); - metaData = randomMetaDataForTx(); - Map indexGenerations = new HashMap<>(); - - try { - long globalGeneration = writer.writeGlobalState("global", metaData); - - for (IndexMetaData indexMetaData : metaData) { - long generation = writer.writeIndex("index", indexMetaData); - indexGenerations.put(indexMetaData.getIndex(), generation); - } - - Manifest newManifest = new Manifest(currentTerm, clusterStateVersion, globalGeneration, indexGenerations); - writer.writeManifestAndCleanup("manifest", newManifest); - possibleMetaData.clear(); - possibleMetaData.add(metaData); - manifest = newManifest; - } catch (WriteStateException e) { - if (e.isDirty()) { - possibleMetaData.add(metaData); - /* - * If dirty WriteStateException occurred, it's only safe to proceed if there is subsequent - * successful write of metadata and Manifest. We prefer to break here, not to over complicate test logic. - * See also MetaDataStateFormat#testFailRandomlyAndReadAnyState, that does not break. 
- */ - break; - } - } - } - - metaStateService.noFailures(); - - Tuple manifestAndMetaData = metaStateService.loadFullState(); - MetaData loadedMetaData = manifestAndMetaData.v2(); - - assertTrue(possibleMetaData.stream().anyMatch(md -> metaDataEquals(md, loadedMetaData))); - } - } +public class GatewayMetaStateTests extends ESTestCase { public void testAddCustomMetaDataOnUpgrade() throws Exception { MetaData metaData = randomMetaData(); diff --git a/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java b/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java new file mode 100644 index 000000000000..d5a03dee70e1 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/gateway/IncrementalClusterStateWriterTests.java @@ -0,0 +1,535 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gateway; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.MockDirectoryWrapper; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ESAllocationTestCase; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.Manifest; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.Index; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.mockito.ArgumentCaptor; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; + +import static org.hamcrest.Matchers.containsString; 
+import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThan; +import static org.mockito.Matchers.anyString; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class IncrementalClusterStateWriterTests extends ESAllocationTestCase { + + private ClusterState noIndexClusterState(boolean masterEligible) { + MetaData metaData = MetaData.builder().build(); + RoutingTable routingTable = RoutingTable.builder().build(); + + return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData) + .routingTable(routingTable) + .nodes(generateDiscoveryNodes(masterEligible)) + .build(); + } + + private ClusterState clusterStateWithUnassignedIndex(IndexMetaData indexMetaData, boolean masterEligible) { + MetaData metaData = MetaData.builder() + .put(indexMetaData, false) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + return ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData) + .routingTable(routingTable) + .nodes(generateDiscoveryNodes(masterEligible)) + .build(); + } + + private ClusterState clusterStateWithAssignedIndex(IndexMetaData indexMetaData, boolean masterEligible) { + AllocationService strategy = createAllocationService(Settings.builder() + .put("cluster.routing.allocation.node_concurrent_recoveries", 100) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") + .put("cluster.routing.allocation.cluster_concurrent_rebalance", 100) + .put("cluster.routing.allocation.node_initial_primaries_recoveries", 100) + .build()); + + ClusterState oldClusterState = clusterStateWithUnassignedIndex(indexMetaData, masterEligible); + RoutingTable routingTable = strategy.reroute(oldClusterState, "reroute").routingTable(); + + MetaData metaDataNewClusterState = MetaData.builder() + .put(oldClusterState.metaData().index("test"), false) + .build(); + + return ClusterState.builder(oldClusterState).routingTable(routingTable) + .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); + } + + private ClusterState clusterStateWithClosedIndex(IndexMetaData indexMetaData, boolean masterEligible) { + ClusterState oldClusterState = clusterStateWithAssignedIndex(indexMetaData, masterEligible); + + MetaData metaDataNewClusterState = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).state(IndexMetaData.State.CLOSE) + .numberOfShards(5).numberOfReplicas(2)) + .version(oldClusterState.metaData().version() + 1) + .build(); + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaDataNewClusterState.index("test")) + .build(); + + return ClusterState.builder(oldClusterState).routingTable(routingTable) + .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build(); + } + + private ClusterState clusterStateWithJustOpenedIndex(IndexMetaData indexMetaData, boolean masterEligible) { + ClusterState oldClusterState = clusterStateWithClosedIndex(indexMetaData, masterEligible); + + MetaData metaDataNewClusterState = MetaData.builder() + 
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).state(IndexMetaData.State.OPEN)
+                .numberOfShards(5).numberOfReplicas(2))
+            .version(oldClusterState.metaData().version() + 1)
+            .build();
+
+        return ClusterState.builder(oldClusterState)
+            .metaData(metaDataNewClusterState).version(oldClusterState.getVersion() + 1).build();
+    }
+
+    private DiscoveryNodes.Builder generateDiscoveryNodes(boolean masterEligible) {
+        Set<DiscoveryNodeRole> dataOnlyRoles = Collections.singleton(DiscoveryNodeRole.DATA_ROLE);
+        return DiscoveryNodes.builder().add(newNode("node1", masterEligible ? MASTER_DATA_ROLES : dataOnlyRoles))
+            .add(newNode("master_node", MASTER_DATA_ROLES)).localNodeId("node1").masterNodeId(masterEligible ? "node1" : "master_node");
+    }
+
+    private Set<Index> randomPrevWrittenIndices(IndexMetaData indexMetaData) {
+        if (randomBoolean()) {
+            return Collections.singleton(indexMetaData.getIndex());
+        } else {
+            return Collections.emptySet();
+        }
+    }
+
+    private IndexMetaData createIndexMetaData(String name) {
+        return IndexMetaData.builder(name).
+            settings(settings(Version.CURRENT)).
+            numberOfShards(5).
+            numberOfReplicas(2).
+            build();
+    }
+
+    public void testGetRelevantIndicesWithUnassignedShardsOnMasterEligibleNode() {
+        IndexMetaData indexMetaData = createIndexMetaData("test");
+        Set<Index> indices = IncrementalClusterStateWriter.getRelevantIndices(
+            clusterStateWithUnassignedIndex(indexMetaData, true),
+            noIndexClusterState(true),
+            randomPrevWrittenIndices(indexMetaData));
+        assertThat(indices.size(), equalTo(1));
+    }
+
+    public void testGetRelevantIndicesWithUnassignedShardsOnDataOnlyNode() {
+        IndexMetaData indexMetaData = createIndexMetaData("test");
+        Set<Index> indices = IncrementalClusterStateWriter.getRelevantIndices(
+            clusterStateWithUnassignedIndex(indexMetaData, false),
+            noIndexClusterState(false),
+            randomPrevWrittenIndices(indexMetaData));
+        assertThat(indices.size(), equalTo(0));
+    }
+
+    public void testGetRelevantIndicesWithAssignedShards() {
+        IndexMetaData indexMetaData = createIndexMetaData("test");
+        boolean masterEligible = randomBoolean();
+        Set<Index> indices = IncrementalClusterStateWriter.getRelevantIndices(
+            clusterStateWithAssignedIndex(indexMetaData, masterEligible),
+            clusterStateWithUnassignedIndex(indexMetaData, masterEligible),
+            randomPrevWrittenIndices(indexMetaData));
+        assertThat(indices.size(), equalTo(1));
+    }
+
+    public void testGetRelevantIndicesForClosedPrevWrittenIndexOnDataOnlyNode() {
+        IndexMetaData indexMetaData = createIndexMetaData("test");
+        Set<Index> indices = IncrementalClusterStateWriter.getRelevantIndices(
+            clusterStateWithClosedIndex(indexMetaData, false),
+            clusterStateWithAssignedIndex(indexMetaData, false),
+            Collections.singleton(indexMetaData.getIndex()));
+        assertThat(indices.size(), equalTo(1));
+    }
+
+    public void testGetRelevantIndicesForClosedPrevNotWrittenIndexOnDataOnlyNode() {
+        IndexMetaData indexMetaData = createIndexMetaData("test");
+        Set<Index> indices = IncrementalClusterStateWriter.getRelevantIndices(
+            clusterStateWithJustOpenedIndex(indexMetaData, false),
+            clusterStateWithClosedIndex(indexMetaData, false),
+            Collections.emptySet());
+        assertThat(indices.size(), equalTo(0));
+    }
+
+    public void testGetRelevantIndicesForWasClosedPrevWrittenIndexOnDataOnlyNode() {
+        IndexMetaData indexMetaData = createIndexMetaData("test");
+        Set<Index> indices = IncrementalClusterStateWriter.getRelevantIndices(
+            clusterStateWithJustOpenedIndex(indexMetaData, false),
+            clusterStateWithClosedIndex(indexMetaData, false),
+            Collections.singleton(indexMetaData.getIndex()));
+        assertThat(indices.size(), equalTo(1));
+    }
+
+    public void testResolveStatesToBeWritten() throws WriteStateException {
+        Map<Index, Long> indices = new HashMap<>();
+        Set<Index> relevantIndices = new HashSet<>();
+
+        IndexMetaData removedIndex = createIndexMetaData("removed_index");
+        indices.put(removedIndex.getIndex(), 1L);
+
+        IndexMetaData versionChangedIndex = createIndexMetaData("version_changed_index");
+        indices.put(versionChangedIndex.getIndex(), 2L);
+        relevantIndices.add(versionChangedIndex.getIndex());
+
+        IndexMetaData notChangedIndex = createIndexMetaData("not_changed_index");
+        indices.put(notChangedIndex.getIndex(), 3L);
+        relevantIndices.add(notChangedIndex.getIndex());
+
+        IndexMetaData newIndex = createIndexMetaData("new_index");
+        relevantIndices.add(newIndex.getIndex());
+
+        MetaData oldMetaData = MetaData.builder()
+            .put(removedIndex, false)
+            .put(versionChangedIndex, false)
+            .put(notChangedIndex, false)
+            .build();
+
+        MetaData newMetaData = MetaData.builder()
+            .put(versionChangedIndex, true)
+            .put(notChangedIndex, false)
+            .put(newIndex, false)
+            .build();
+
+        IndexMetaData newVersionChangedIndex = newMetaData.index(versionChangedIndex.getIndex());
+
+        List<IncrementalClusterStateWriter.IndexMetaDataAction> actions =
+            IncrementalClusterStateWriter.resolveIndexMetaDataActions(indices, relevantIndices, oldMetaData, newMetaData);
+
+        assertThat(actions, hasSize(3));
+
+        boolean keptPreviousGeneration = false;
+        boolean wroteNewIndex = false;
+        boolean wroteChangedIndex = false;
+
+        for (IncrementalClusterStateWriter.IndexMetaDataAction action : actions) {
+            if (action instanceof IncrementalClusterStateWriter.KeepPreviousGeneration) {
+                assertThat(action.getIndex(), equalTo(notChangedIndex.getIndex()));
+                IncrementalClusterStateWriter.AtomicClusterStateWriter writer
+                    = mock(IncrementalClusterStateWriter.AtomicClusterStateWriter.class);
+                assertThat(action.execute(writer), equalTo(3L));
+                verify(writer, times(1)).incrementIndicesSkipped();
+                verifyNoMoreInteractions(writer);
+                keptPreviousGeneration = true;
+            }
+            if (action instanceof IncrementalClusterStateWriter.WriteNewIndexMetaData) {
+                assertThat(action.getIndex(), equalTo(newIndex.getIndex()));
+                IncrementalClusterStateWriter.AtomicClusterStateWriter writer
+                    = mock(IncrementalClusterStateWriter.AtomicClusterStateWriter.class);
+                when(writer.writeIndex("freshly created", newIndex)).thenReturn(0L);
+                assertThat(action.execute(writer), equalTo(0L));
+                verify(writer, times(1)).incrementIndicesWritten();
+                wroteNewIndex = true;
+            }
+            if (action instanceof IncrementalClusterStateWriter.WriteChangedIndexMetaData) {
+                assertThat(action.getIndex(), equalTo(newVersionChangedIndex.getIndex()));
+                IncrementalClusterStateWriter.AtomicClusterStateWriter writer
+                    = mock(IncrementalClusterStateWriter.AtomicClusterStateWriter.class);
+                when(writer.writeIndex(anyString(), eq(newVersionChangedIndex))).thenReturn(3L);
+                assertThat(action.execute(writer), equalTo(3L));
+                ArgumentCaptor<String> reason = ArgumentCaptor.forClass(String.class);
+                verify(writer).writeIndex(reason.capture(), eq(newVersionChangedIndex));
+                verify(writer, times(1)).incrementIndicesWritten();
+                assertThat(reason.getValue(), containsString(Long.toString(versionChangedIndex.getVersion())));
+                assertThat(reason.getValue(), containsString(Long.toString(newVersionChangedIndex.getVersion())));
+                wroteChangedIndex = true;
+            }
+        }
+
+        assertTrue(keptPreviousGeneration);
+        assertTrue(wroteNewIndex);
+        assertTrue(wroteChangedIndex);
+    }
+
+    private static class MetaStateServiceWithFailures extends MetaStateService {
+        private final int invertedFailRate;
+        private boolean failRandomly;
+
+        private <T> MetaDataStateFormat<T> wrap(MetaDataStateFormat<T> format) {
+            return new MetaDataStateFormat<T>(format.getPrefix()) {
+                @Override
+                public void toXContent(XContentBuilder builder, T state) throws IOException {
+                    format.toXContent(builder, state);
+                }
+
+                @Override
+                public T fromXContent(XContentParser parser) throws IOException {
+                    return format.fromXContent(parser);
+                }
+
+                @Override
+                protected Directory newDirectory(Path dir) {
+                    MockDirectoryWrapper mock = newMockFSDirectory(dir);
+                    if (failRandomly) {
+                        MockDirectoryWrapper.Failure fail = new MockDirectoryWrapper.Failure() {
+                            @Override
+                            public void eval(MockDirectoryWrapper dir) throws IOException {
+                                int r = randomIntBetween(0, invertedFailRate);
+                                if (r == 0) {
+                                    throw new MockDirectoryWrapper.FakeIOException();
+                                }
+                            }
+                        };
+                        mock.failOn(fail);
+                    }
+                    closeAfterSuite(mock);
+                    return mock;
+                }
+            };
+        }
+
+        MetaStateServiceWithFailures(int invertedFailRate, NodeEnvironment nodeEnv, NamedXContentRegistry namedXContentRegistry) {
+            super(nodeEnv, namedXContentRegistry);
+            META_DATA_FORMAT = wrap(MetaData.FORMAT);
+            INDEX_META_DATA_FORMAT = wrap(IndexMetaData.FORMAT);
+            MANIFEST_FORMAT = wrap(Manifest.FORMAT);
+            failRandomly = false;
+            this.invertedFailRate = invertedFailRate;
+        }
+
+        void failRandomly() {
+            failRandomly = true;
+        }
+
+        void noFailures() {
+            failRandomly = false;
+        }
+    }
+
+    private boolean metaDataEquals(MetaData md1, MetaData md2) {
+        boolean equals = MetaData.isGlobalStateEquals(md1, md2);
+
+        for (IndexMetaData imd : md1) {
+            IndexMetaData imd2 = md2.index(imd.getIndex());
+            equals = equals && imd.equals(imd2);
+        }
+
+        for (IndexMetaData imd : md2) {
+            IndexMetaData imd2 = md1.index(imd.getIndex());
+            equals = equals && imd.equals(imd2);
+        }
+        return equals;
+    }
+
+    private static MetaData randomMetaDataForTx() {
+        int settingNo = randomIntBetween(0, 10);
+        MetaData.Builder builder = MetaData.builder()
+            .persistentSettings(Settings.builder().put("setting" + settingNo, randomAlphaOfLength(5)).build());
+        int numOfIndices = randomIntBetween(0, 3);
+
+        for (int i = 0; i < numOfIndices; i++) {
+            int indexNo = randomIntBetween(0, 50);
+            IndexMetaData indexMetaData = IndexMetaData.builder("index" + indexNo).settings(
+                Settings.builder()
+                    .put(IndexMetaData.SETTING_INDEX_UUID, "index" + indexNo)
+                    .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+                    .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+                    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+                    .build()
+            ).build();
+            builder.put(indexMetaData, false);
+        }
+        return builder.build();
+    }
+
+    public void testAtomicityWithFailures() throws IOException {
+        try (NodeEnvironment env = newNodeEnvironment()) {
+            MetaStateServiceWithFailures metaStateService =
+                new MetaStateServiceWithFailures(randomIntBetween(100, 1000), env, xContentRegistry());
+
+            // We only guarantee atomicity of writes if there is an initial Manifest file
+            Manifest manifest = Manifest.empty();
+            MetaData metaData = MetaData.EMPTY_META_DATA;
+            metaStateService.writeManifestAndCleanup("startup", Manifest.empty());
+            long currentTerm = randomNonNegativeLong();
+            long clusterStateVersion = randomNonNegativeLong();
+
+            metaStateService.failRandomly();
+            Set<MetaData> possibleMetaData = new HashSet<>();
+            possibleMetaData.add(metaData);
+
+            for (int i = 0; i < randomIntBetween(1, 5); i++) {
+                IncrementalClusterStateWriter.AtomicClusterStateWriter writer =
+                    new IncrementalClusterStateWriter.AtomicClusterStateWriter(metaStateService, manifest);
+                metaData = randomMetaDataForTx();
+                Map<Index, Long> indexGenerations = new HashMap<>();
+
+                try {
+                    long globalGeneration = writer.writeGlobalState("global", metaData);
+
+                    for (IndexMetaData indexMetaData : metaData) {
+                        long generation = writer.writeIndex("index", indexMetaData);
+                        indexGenerations.put(indexMetaData.getIndex(), generation);
+                    }
+
+                    Manifest newManifest = new Manifest(currentTerm, clusterStateVersion, globalGeneration, indexGenerations);
+                    writer.writeManifestAndCleanup("manifest", newManifest);
+                    possibleMetaData.clear();
+                    possibleMetaData.add(metaData);
+                    manifest = newManifest;
+                } catch (WriteStateException e) {
+                    if (e.isDirty()) {
+                        possibleMetaData.add(metaData);
+                        /*
+                         * If a dirty WriteStateException occurred, it's only safe to proceed if there is a subsequent
+                         * successful write of the metadata and the Manifest. We prefer to break here rather than
+                         * over-complicate the test logic. See also MetaDataStateFormat#testFailRandomlyAndReadAnyState,
+                         * which does not break.
+                         */
+                        break;
+                    }
+                }
+            }
+
+            metaStateService.noFailures();
+
+            Tuple<Manifest, MetaData> manifestAndMetaData = metaStateService.loadFullState();
+            MetaData loadedMetaData = manifestAndMetaData.v2();
+
+            assertTrue(possibleMetaData.stream().anyMatch(md -> metaDataEquals(md, loadedMetaData)));
+        }
+    }
+
+    @TestLogging(value = "org.elasticsearch.gateway:WARN", reason = "to ensure that we log gateway events on WARN level")
+    public void testSlowLogging() throws WriteStateException, IllegalAccessException {
+        final long slowWriteLoggingThresholdMillis;
+        final Settings settings;
+        if (randomBoolean()) {
+            slowWriteLoggingThresholdMillis = IncrementalClusterStateWriter.SLOW_WRITE_LOGGING_THRESHOLD.get(Settings.EMPTY).millis();
+            settings = Settings.EMPTY;
+        } else {
+            slowWriteLoggingThresholdMillis = randomLongBetween(2, 100000);
+            settings = Settings.builder()
+                .put(IncrementalClusterStateWriter.SLOW_WRITE_LOGGING_THRESHOLD.getKey(), slowWriteLoggingThresholdMillis + "ms")
+                .build();
+        }
+
+        final DiscoveryNode localNode = newNode("node");
+        final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
+            .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId())).build();
+
+        final long startTimeMillis = randomLongBetween(0L, Long.MAX_VALUE - slowWriteLoggingThresholdMillis * 10);
+        final AtomicLong currentTime = new AtomicLong(startTimeMillis);
+        final AtomicLong writeDurationMillis = new AtomicLong(slowWriteLoggingThresholdMillis);
+
+        final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        final IncrementalClusterStateWriter incrementalClusterStateWriter
+            = new IncrementalClusterStateWriter(settings, clusterSettings, mock(MetaStateService.class),
+            new Manifest(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), Collections.emptyMap()),
+            clusterState, () -> currentTime.getAndAdd(writeDurationMillis.get()));
+
+        assertExpectedLogs(clusterState, incrementalClusterStateWriter, new MockLogAppender.SeenEventExpectation(
+            "should see warning at threshold",
+            IncrementalClusterStateWriter.class.getCanonicalName(),
+            Level.WARN,
+            "writing cluster state took [*] which is above the warn threshold of [*]; " +
+                "wrote metadata for [0] indices and skipped [0] unchanged indices"));
+
+        writeDurationMillis.set(randomLongBetween(slowWriteLoggingThresholdMillis, slowWriteLoggingThresholdMillis * 2));
+        assertExpectedLogs(clusterState, incrementalClusterStateWriter, new MockLogAppender.SeenEventExpectation(
+            "should see warning above threshold",
+ IncrementalClusterStateWriter.class.getCanonicalName(), + Level.WARN, + "writing cluster state took [*] which is above the warn threshold of [*]; " + + "wrote metadata for [0] indices and skipped [0] unchanged indices")); + + writeDurationMillis.set(randomLongBetween(1, slowWriteLoggingThresholdMillis - 1)); + assertExpectedLogs(clusterState, incrementalClusterStateWriter, new MockLogAppender.UnseenEventExpectation( + "should not see warning below threshold", + IncrementalClusterStateWriter.class.getCanonicalName(), + Level.WARN, + "*")); + + clusterSettings.applySettings(Settings.builder() + .put(IncrementalClusterStateWriter.SLOW_WRITE_LOGGING_THRESHOLD.getKey(), writeDurationMillis.get() + "ms") + .build()); + assertExpectedLogs(clusterState, incrementalClusterStateWriter, new MockLogAppender.SeenEventExpectation( + "should see warning at reduced threshold", + IncrementalClusterStateWriter.class.getCanonicalName(), + Level.WARN, + "writing cluster state took [*] which is above the warn threshold of [*]; " + + "wrote metadata for [0] indices and skipped [0] unchanged indices")); + + assertThat(currentTime.get(), lessThan(startTimeMillis + 10 * slowWriteLoggingThresholdMillis)); // ensure no overflow + } + + private void assertExpectedLogs(ClusterState clusterState, IncrementalClusterStateWriter incrementalClusterStateWriter, + MockLogAppender.LoggingExpectation expectation) throws IllegalAccessException, WriteStateException { + MockLogAppender mockAppender = new MockLogAppender(); + mockAppender.start(); + mockAppender.addExpectation(expectation); + Logger classLogger = LogManager.getLogger(IncrementalClusterStateWriter.class); + Loggers.addAppender(classLogger, mockAppender); + + try { + incrementalClusterStateWriter.updateClusterState(clusterState, clusterState); + } finally { + Loggers.removeAppender(classLogger, mockAppender); + mockAppender.stop(); + } + mockAppender.assertAllExpectationsMatched(); + } +} diff --git a/server/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java b/server/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java index fba9a005c539..29ad07bae2fa 100644 --- a/server/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java @@ -68,13 +68,15 @@ public class QuorumGatewayIT extends ESIntegTestCase { @Override public void doAfterNodes(int numNodes, final Client activeClient) throws Exception { if (numNodes == 1) { - assertTrue(awaitBusy(() -> { + assertBusy(() -> { logger.info("--> running cluster_health (wait for the shards to startup)"); ClusterHealthResponse clusterHealth = activeClient.admin().cluster().health(clusterHealthRequest() .waitForYellowStatus().waitForNodes("2").waitForActiveShards(test.numPrimaries * 2)).actionGet(); logger.info("--> done cluster_health, status {}", clusterHealth.getStatus()); - return (!clusterHealth.isTimedOut()) && clusterHealth.getStatus() == ClusterHealthStatus.YELLOW; - }, 30, TimeUnit.SECONDS)); + assertFalse(clusterHealth.isTimedOut()); + assertEquals(ClusterHealthStatus.YELLOW, clusterHealth.getStatus()); + }, 30, TimeUnit.SECONDS); + logger.info("--> one node is closed -- index 1 document into the remaining nodes"); activeClient.prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().field("field", "value3") .endObject()).get(); diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 86a56bc50a05..54602837f706 
100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -70,7 +70,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { return new CompressedXContent(Strings.toString(builder)); } - public void testBaseAsyncTask() throws InterruptedException, IOException { + public void testBaseAsyncTask() throws Exception { IndexService indexService = createIndex("test", Settings.EMPTY); AtomicReference latch = new AtomicReference<>(new CountDownLatch(1)); AtomicReference latch2 = new AtomicReference<>(new CountDownLatch(1)); @@ -126,7 +126,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // now close the index final Index index = indexService.index(); assertAcked(client().admin().indices().prepareClose(index.getName())); - awaitBusy(() -> getInstanceFromNode(IndicesService.class).hasIndex(index)); + assertBusy(() -> assertTrue("Index not found: " + index.getName(), getInstanceFromNode(IndicesService.class).hasIndex(index))); final IndexService closedIndexService = getInstanceFromNode(IndicesService.class).indexServiceSafe(index); assertNotSame(indexService, closedIndexService); @@ -136,7 +136,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // now reopen the index assertAcked(client().admin().indices().prepareOpen(index.getName())); - awaitBusy(() -> getInstanceFromNode(IndicesService.class).hasIndex(index)); + assertBusy(() -> assertTrue("Index not found: " + index.getName(), getInstanceFromNode(IndicesService.class).hasIndex(index))); indexService = getInstanceFromNode(IndicesService.class).indexServiceSafe(index); assertNotSame(closedIndexService, indexService); @@ -204,7 +204,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // now close the index final Index index = indexService.index(); assertAcked(client().admin().indices().prepareClose(index.getName())); - awaitBusy(() -> getInstanceFromNode(IndicesService.class).hasIndex(index)); + assertBusy(() -> assertTrue("Index not found: " + index.getName(), getInstanceFromNode(IndicesService.class).hasIndex(index))); final IndexService closedIndexService = getInstanceFromNode(IndicesService.class).indexServiceSafe(index); assertNotSame(indexService, closedIndexService); @@ -215,7 +215,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // now reopen the index assertAcked(client().admin().indices().prepareOpen(index.getName())); - awaitBusy(() -> getInstanceFromNode(IndicesService.class).hasIndex(index)); + assertBusy(() -> assertTrue("Index not found: " + index.getName(), getInstanceFromNode(IndicesService.class).hasIndex(index))); indexService = getInstanceFromNode(IndicesService.class).indexServiceSafe(index); assertNotSame(closedIndexService, indexService); refreshTask = indexService.getRefreshTask(); @@ -241,7 +241,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // now close the index final Index index = indexService.index(); assertAcked(client().admin().indices().prepareClose(index.getName())); - awaitBusy(() -> getInstanceFromNode(IndicesService.class).hasIndex(index)); + assertBusy(() -> assertTrue("Index not found: " + index.getName(), getInstanceFromNode(IndicesService.class).hasIndex(index))); final IndexService closedIndexService = getInstanceFromNode(IndicesService.class).indexServiceSafe(index); assertNotSame(indexService, closedIndexService); @@ -252,7 +252,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // now reopen the 
index assertAcked(client().admin().indices().prepareOpen(index.getName())); - awaitBusy(() -> getInstanceFromNode(IndicesService.class).hasIndex(index)); + assertBusy(() -> assertTrue("Index not found: " + index.getName(), getInstanceFromNode(IndicesService.class).hasIndex(index))); indexService = getInstanceFromNode(IndicesService.class).indexServiceSafe(index); assertNotSame(closedIndexService, indexService); fsyncTask = indexService.getFsyncTask(); diff --git a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index ce46374cf271..2ae8acf86a0b 100644 --- a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -21,24 +21,18 @@ package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchTask; -import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TestSearchContext; -import org.elasticsearch.threadpool.ThreadPool; import org.hamcrest.Matchers; import java.io.IOException; @@ -59,98 +53,12 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase { protected SearchContext createSearchContext(IndexService indexService) { return createSearchContext(indexService, new String[]{}); } + protected SearchContext createSearchContext(IndexService indexService, String ... 
groupStats) { BigArrays bigArrays = indexService.getBigArrays(); - ThreadPool threadPool = indexService.getThreadPool(); + final ShardSearchRequest request = + new ShardSearchRequest(new ShardId(indexService.index(), 0), 0L, null); return new TestSearchContext(bigArrays, indexService) { - final ShardSearchRequest request = new ShardSearchRequest() { - private SearchSourceBuilder searchSourceBuilder; - @Override - public ShardId shardId() { - return new ShardId(indexService.index(), 0); - } - - @Override - public SearchSourceBuilder source() { - return searchSourceBuilder; - } - - @Override - public AliasFilter getAliasFilter() { - return new AliasFilter(QueryBuilders.matchAllQuery(), "foo"); - } - - @Override - public void setAliasFilter(AliasFilter filter) { - - } - - @Override - public void source(SearchSourceBuilder source) { - searchSourceBuilder = source; - } - - @Override - public int numberOfShards() { - return 0; - } - - @Override - public SearchType searchType() { - return null; - } - - @Override - public float indexBoost() { - return 1.0f; - } - - @Override - public long nowInMillis() { - return 0; - } - - @Override - public Boolean requestCache() { - return null; - } - - @Override - public boolean allowPartialSearchResults() { - return true; - } - - @Override - public Scroll scroll() { - return null; - } - - @Override - public String[] indexRoutings() { - return null; - } - - @Override - public String preference() { - return null; - } - - @Override - public BytesReference cacheKey() { - return null; - } - - @Override - public Rewriteable getRewriteable() { - return null; - } - - @Override - public String getClusterAlias() { - return null; - } - }; - @Override public List groupStats() { return Arrays.asList(groupStats); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineMergeIT.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineMergeIT.java index a072ca880e01..328efd817461 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineMergeIT.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineMergeIT.java @@ -27,19 +27,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.hamcrest.Matchers; - -import java.io.IOException; -import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; @ClusterScope(supportsDedicatedMasters = false, numDataNodes = 1, scope = Scope.SUITE) public class InternalEngineMergeIT extends ESIntegTestCase { - public void testMergesHappening() throws InterruptedException, IOException, ExecutionException { + public void testMergesHappening() throws Exception { final int numOfShards = randomIntBetween(1, 5); // some settings to keep num segments low assertAcked(prepareCreate("test").setSettings(Settings.builder() @@ -66,21 +65,24 @@ public class InternalEngineMergeIT extends ESIntegTestCase { stats.getPrimaries().getMerge().getCurrent()); } final long upperNumberSegments = 2 * numOfShards * 10; - awaitBusy(() -> { + + 
assertBusy(() -> { IndicesStatsResponse stats = client().admin().indices().prepareStats().setSegments(true).setMerge(true).get(); logger.info("numshards {}, segments {}, total merges {}, current merge {}", numOfShards, stats.getPrimaries().getSegments().getCount(), stats.getPrimaries().getMerge().getTotal(), stats.getPrimaries().getMerge().getCurrent()); long current = stats.getPrimaries().getMerge().getCurrent(); long count = stats.getPrimaries().getSegments().getCount(); - return count < upperNumberSegments && current == 0; + assertThat(count, lessThan(upperNumberSegments)); + assertThat(current, equalTo(0L)); }); + IndicesStatsResponse stats = client().admin().indices().prepareStats().setSegments(true).setMerge(true).get(); logger.info("numshards {}, segments {}, total merges {}, current merge {}", numOfShards, stats.getPrimaries().getSegments().getCount(), stats.getPrimaries().getMerge().getTotal(), stats.getPrimaries().getMerge().getCurrent()); long count = stats.getPrimaries().getSegments().getCount(); - assertThat(count, Matchers.lessThanOrEqualTo(upperNumberSegments)); + assertThat(count, lessThanOrEqualTo(upperNumberSegments)); } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 0c2133b59809..b5cc265601a3 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -731,16 +731,20 @@ public class InternalEngineTests extends EngineTestCase { } public void testFlushIsDisabledDuringTranslogRecovery() throws IOException { - assertFalse(engine.isRecovering()); + engine.ensureCanFlush(); // recovered already ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); engine.close(); engine = new InternalEngine(engine.config()); + expectThrows(IllegalStateException.class, engine::ensureCanFlush); expectThrows(IllegalStateException.class, () -> engine.flush(true, true)); - assertTrue(engine.isRecovering()); - engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); - assertFalse(engine.isRecovering()); + if (randomBoolean()) { + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); + } else { + engine.skipTranslogRecovery(); + } + engine.ensureCanFlush(); // ready doc = testParsedDocument("2", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); engine.flush(); @@ -2825,7 +2829,7 @@ public class InternalEngineTests extends EngineTestCase { { for (int i = 0; i < 2; i++) { try (InternalEngine engine = new InternalEngine(config)) { - assertTrue(engine.isRecovering()); + expectThrows(IllegalStateException.class, engine::ensureCanFlush); Map userData = engine.getLastCommittedSegmentInfos().getUserData(); if (i == 0) { assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); @@ -5260,7 +5264,7 @@ public class InternalEngineTests extends EngineTestCase { minTranslogGen = engine.getTranslog().getMinFileGeneration(); } - store.trimUnsafeCommits(globalCheckpoint.get(), minTranslogGen,config.getIndexSettings().getIndexVersionCreated()); + store.trimUnsafeCommits(config.getTranslogConfig().getTranslogPath()); long safeMaxSeqNo = commitMaxSeqNo.stream().filter(s -> s <= globalCheckpoint.get()) .reduce((s1, s2) -> s2) // get the last one. 
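The recurring change across the test files above and below is the replacement of the boolean-returning `awaitBusy(...)` helper with `assertBusy(...)`, which repeatedly runs a block of assertions and, on timeout, rethrows the last assertion failure. The practical difference is diagnostic: a timed-out `awaitBusy` surfaces as an uninformative `assertTrue(false)`, while a timed-out `assertBusy` reports exactly which condition never held. A minimal, self-contained sketch of the two contracts (plain Java; the names `waitUntil` and `assertBusy` mirror the ESTestCase helpers, but the bodies here are simplified assumptions, not the actual implementations):

    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;

    public final class BusyAssertSketch {

        // Old style: poll a predicate until it holds or the timeout elapses.
        // The caller only learns true/false, so failures carry no detail.
        static boolean waitUntil(BooleanSupplier condition, long timeout, TimeUnit unit) throws InterruptedException {
            long deadlineNanos = System.nanoTime() + unit.toNanos(timeout);
            long sleepMillis = 1;
            while (condition.getAsBoolean() == false) {
                if (System.nanoTime() >= deadlineNanos) {
                    return false;
                }
                Thread.sleep(sleepMillis);
                sleepMillis = Math.min(100, sleepMillis * 2); // crude exponential backoff
            }
            return true;
        }

        @FunctionalInterface
        interface CheckedRunnable {
            void run() throws Exception;
        }

        // New style: retry a block of assertions. On timeout the most recent
        // AssertionError propagates, so the test failure names the condition
        // that never became true.
        static void assertBusy(CheckedRunnable assertions, long timeout, TimeUnit unit) throws Exception {
            long deadlineNanos = System.nanoTime() + unit.toNanos(timeout);
            long sleepMillis = 1;
            while (true) {
                try {
                    assertions.run();
                    return;
                } catch (AssertionError e) {
                    if (System.nanoTime() >= deadlineNanos) {
                        throw e;
                    }
                }
                Thread.sleep(sleepMillis);
                sleepMillis = Math.min(100, sleepMillis * 2);
            }
        }
    }

This is why most call sites in this patch convert to assertion style (e.g. `assertBusy(() -> assertTrue(tracker.pendingInSync()))` further down), while `CorruptedFileIT` below deliberately keeps a boolean `waitUntil`: there the outcome is logged and tolerated rather than asserted.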
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
index a1ad67a0550a..639274c0c6a1 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
@@ -20,12 +20,16 @@
 package org.elasticsearch.index.mapper;
 
 import org.apache.lucene.index.IndexOptions;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.VersionUtils;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -95,33 +99,33 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
         assertFieldNames(Collections.emptySet(), doc);
     }
 
-    public void testExplicitEnabled() throws Exception {
+    public void testUsingEnabledSettingThrows() throws Exception {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_field_names").field("enabled", true).endObject()
             .startObject("properties")
             .startObject("field").field("type", "keyword").field("doc_values", false).endObject()
             .endObject().endObject().endObject());
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type", new CompressedXContent(mapping));
-        FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
-        assertTrue(fieldNamesMapper.fieldType().isEnabled());
+        MapperParsingException ex = expectThrows(MapperParsingException.class,
+            () -> createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)));
 
-        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1",
-            BytesReference.bytes(XContentFactory.jsonBuilder()
-                .startObject()
-                .field("field", "value")
-                .endObject()),
-            XContentType.JSON));
-
-        assertFieldNames(set("field"), doc);
-        assertWarnings(FieldNamesFieldMapper.TypeParser.ENABLED_DEPRECATION_MESSAGE.replace("{}", "test"));
+        assertEquals("The `enabled` setting for the `_field_names` field has been deprecated and removed but is still used in index [{}]. " +
+            "Please remove it from your mappings and templates.", ex.getMessage());
     }
 
-    public void testDisabled() throws Exception {
+    /**
+     * Disabling the _field_names field should still work for indices before 8.0
+     */
+    public void testUsingEnabledBefore8() throws Exception {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_field_names").field("enabled", false).endObject()
             .endObject().endObject());
-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
+
+        DocumentMapper docMapper = createIndex("test",
+            Settings.builder()
+                .put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(),
+                    VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0))
+                .build()).mapperService()
+            .documentMapperParser()
             .parse("type", new CompressedXContent(mapping));
         FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class);
         assertFalse(fieldNamesMapper.fieldType().isEnabled());
@@ -137,14 +141,20 @@
         assertWarnings(FieldNamesFieldMapper.TypeParser.ENABLED_DEPRECATION_MESSAGE.replace("{}", "test"));
     }
 
-    public void testMergingMappings() throws Exception {
+    /**
+     * Merging the "_field_names" enabled setting is forbidden in 8.0, but we still want to test the behavior on pre-8 indices
+     */
+    public void testMergingMappingsBefore8() throws Exception {
         String enabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_field_names").field("enabled", true).endObject()
             .endObject().endObject());
         String disabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("_field_names").field("enabled", false).endObject()
             .endObject().endObject());
-        MapperService mapperService = createIndex("test").mapperService();
+        MapperService mapperService = createIndex("test", Settings.builder()
+            .put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(),
+                VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0))
+            .build()).mapperService();
         DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping),
             MapperService.MergeReason.MAPPING_UPDATE);
@@ -156,4 +166,12 @@
         assertTrue(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
         assertWarnings(FieldNamesFieldMapper.TypeParser.ENABLED_DEPRECATION_MESSAGE.replace("{}", "test"));
     }
+
+    @Override
+    protected boolean forbidPrivateIndexSettings() {
+        /**
+         * This is needed to force the index version with {@link IndexMetaData.SETTING_INDEX_VERSION_CREATED}.
+ */ + return false; + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java index 11b365ff16e2..1b100fb0872e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java @@ -27,9 +27,12 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import java.util.function.Predicate; +import static org.hamcrest.Matchers.containsString; + public class IndexFieldTypeTests extends FieldTypeTestCase { @Override @@ -46,15 +49,6 @@ public class IndexFieldTypeTests extends FieldTypeTestCase { assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("other_ind", null, createContext())); } - public void testRegexpQuery() { - MappedFieldType ft = createDefaultFieldType(); - ft.setName("field"); - ft.setIndexOptions(IndexOptions.DOCS); - - assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("ind.x", 0, 10, null, createContext())); - assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("ind?x", 0, 10, null, createContext())); - } - public void testWildcardQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); @@ -64,6 +58,16 @@ public class IndexFieldTypeTests extends FieldTypeTestCase { assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("other_ind*x", null, createContext())); } + public void testRegexpQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + + QueryShardException e = expectThrows(QueryShardException.class, () -> + assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("ind.x", 0, 10, null, createContext()))); + assertThat(e.getMessage(), containsString("Can only use regexp queries on keyword and text fields")); + } + private QueryShardContext createContext() { IndexMetaData indexMetaData = IndexMetaData.builder("index") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index fb7386446c1b..535f27c5aa3d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -130,7 +129,7 @@ public class RangeFieldQueryStringQueryBuilderTests extends AbstractQueryTestCas } @Override - protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { + protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { assertThat(query, 
either(instanceOf(PointRangeQuery.class)).or(instanceOf(IndexOrDocValuesQuery.class))); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 6288c3c95666..10a31d805480 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.hamcrest.Matchers; @@ -74,11 +73,10 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase clauses = new ArrayList<>(); clauses.addAll(getBooleanClauses(queryBuilder.must(), BooleanClause.Occur.MUST, context)); clauses.addAll(getBooleanClauses(queryBuilder.mustNot(), BooleanClause.Occur.MUST_NOT, context)); diff --git a/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java index 0e0f767d5a5f..534126ee5f35 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.query; import org.apache.lucene.queries.function.FunctionScoreQuery; import org.apache.lucene.search.Query; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -40,9 +39,9 @@ public class BoostingQueryBuilderTests extends AbstractQueryTestCase queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context.getQueryShardContext()); + protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + Collection queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context); assertThat(query, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; assertThat(disjunctionMaxQuery.getTieBreakerMultiplier(), equalTo(queryBuilder.tieBreaker())); diff --git a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java index c2fcfdd7140e..c1622057b6ba 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.joda.time.DateTime; import org.elasticsearch.index.query.DistanceFeatureQueryBuilder.Origin; @@ -74,7 +73,9 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase fields = context.getQueryShardContext().simpleMatchToIndexNames(fieldPattern); - Collection mappedFields = 
fields.stream().filter((field) -> context.getQueryShardContext().getObjectMapper(field) != null - || context.getQueryShardContext().getMapperService().fullName(field) != null).collect(Collectors.toList()); + Collection fields = context.simpleMatchToIndexNames(fieldPattern); + Collection mappedFields = fields.stream().filter((field) -> context.getObjectMapper(field) != null + || context.getMapperService().fullName(field) != null).collect(Collectors.toList()); if (fields.size() == 1 && mappedFields.size() == 0) { assertThat(query, instanceOf(MatchNoDocsQuery.class)); MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query; @@ -73,21 +72,21 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase childFields = new ArrayList<>(); - context.getQueryShardContext().getObjectMapper(field).forEach(mapper -> childFields.add(mapper.name())); + context.getObjectMapper(field).forEach(mapper -> childFields.add(mapper.name())); assertThat(booleanQuery.clauses().size(), equalTo(childFields.size())); for (int i = 0; i < childFields.size(); i++) { BooleanClause booleanClause = booleanQuery.clauses().get(i); assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); } - } else if (context.getQueryShardContext().getMapperService().fullName(field).hasDocValues()) { + } else if (context.getMapperService().fullName(field).hasDocValues()) { assertThat(constantScoreQuery.getQuery(), instanceOf(DocValuesFieldExistsQuery.class)); DocValuesFieldExistsQuery dvExistsQuery = (DocValuesFieldExistsQuery) constantScoreQuery.getQuery(); assertEquals(field, dvExistsQuery.getField()); - } else if (context.getQueryShardContext().getMapperService().fullName(field).omitNorms() == false) { + } else if (context.getMapperService().fullName(field).omitNorms() == false) { assertThat(constantScoreQuery.getQuery(), instanceOf(NormsFieldExistsQuery.class)); NormsFieldExistsQuery normsExistsQuery = (NormsFieldExistsQuery) constantScoreQuery.getQuery(); assertEquals(field, normsExistsQuery.getField()); diff --git a/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java index 9d98a12358f2..f564972b2a43 100644 --- a/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/FieldMaskingSpanQueryBuilderTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -43,12 +42,14 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase } @Override - protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { + protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { if (queryBuilder.ids().size() == 0) { assertThat(query, instanceOf(MatchNoDocsQuery.class)); } else { diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 8c6f3a75b7ae..4982d7477d99 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -99,7 +99,7 @@ public class InnerHitBuilderTests extends ESTestCase { * * This is necessary to ensure because we use the serialized BytesReference * of this builder as part of the cacheKey in - * {@link ShardSearchLocalRequest} (via + * {@link ShardSearchRequest} (via * {@link SearchSourceBuilder#collapse(org.elasticsearch.search.collapse.CollapseBuilder)}) */ public void testSerializationOrder() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java index 4974a6cb6902..4f2d9d217f95 100644 --- a/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/IntervalQueryBuilderTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -134,7 +133,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase 0) { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery booleanQuery = (BooleanQuery) query; diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 3bbcca56beca..6b4ab99451f6 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -40,7 +40,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type; import org.elasticsearch.index.search.MatchQuery; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -155,7 +154,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase innerHitInternals = new HashMap<>(); InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitInternals); - InnerHitsContext innerHitsContext = new InnerHitsContext(); - for (InnerHitContextBuilder builder : innerHitInternals.values()) { - builder.build(searchContext, innerHitsContext); - } - assertEquals(1, innerHitsContext.getInnerHits().size()); - assertTrue(innerHitsContext.getInnerHits().containsKey(queryBuilder.innerHit().getName())); - InnerHitsContext.InnerHitSubContext innerHits = innerHitsContext.getInnerHits().get(queryBuilder.innerHit().getName()); - assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); - 
assertEquals(innerHits.sort().sort.getSort().length, 1); - assertEquals(innerHits.sort().sort.getSort()[0].getField(), INT_FIELD_NAME); + assertTrue(innerHitInternals.containsKey(queryBuilder.innerHit().getName())); + InnerHitContextBuilder innerHits = innerHitInternals.get(queryBuilder.innerHit().getName()); + assertEquals(innerHits.innerHitBuilder(), queryBuilder.innerHit()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java index 2f868d029213..ee56a67092d7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/PrefixQueryBuilderTests.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -68,7 +67,7 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase spanQueryBuilderIterator = queryBuilder.clauses().iterator(); for (SpanQuery spanQuery : spanNearQuery.getClauses()) { - assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context.getQueryShardContext()))); + assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } } else if (query instanceof SpanTermQuery || query instanceof SpanBoostQuery) { assertThat(queryBuilder.clauses().size(), equalTo(1)); - assertThat(query, equalTo(queryBuilder.clauses().get(0).toQuery(context.getQueryShardContext()))); + assertThat(query, equalTo(queryBuilder.clauses().get(0).toQuery(context))); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java index 7df58553e276..ed6deb684489 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -56,11 +55,11 @@ public class SpanNotQueryBuilderTests extends AbstractQueryTestCase spanQueryBuilderIterator = queryBuilder.clauses().iterator(); for (SpanQuery spanQuery : spanOrQuery.getClauses()) { - assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context.getQueryShardContext()))); + assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java index a5ef596e0255..27f20f2295ae 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.spans.SpanTermQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.BytesRefs; import 
org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -59,14 +58,14 @@ public class SpanTermQueryBuilderTests extends AbstractTermQueryTestCase shards.getPrimary().pendingInSync()); + assertBusy(() -> assertTrue(shards.getPrimary().pendingInSync())); { shards.index(new IndexRequest(index.getName(), "type", "last").source("{}", XContentType.JSON)); final long expectedDocs = docs + 3L; diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java index 582dd6c18f64..f5642a433101 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java @@ -184,7 +184,7 @@ public class ReplicationTrackerTests extends ReplicationTrackerTestCase { assertThat(updatedGlobalCheckpoint.get(), equalTo(update)); } - public void testMarkAllocationIdAsInSync() throws BrokenBarrierException, InterruptedException { + public void testMarkAllocationIdAsInSync() throws Exception { final long initialClusterStateVersion = randomNonNegativeLong(); Map activeWithCheckpoints = randomAllocationsWithLocalCheckpoints(1, 1); Set active = new HashSet<>(activeWithCheckpoints.keySet()); @@ -212,7 +212,7 @@ public class ReplicationTrackerTests extends ReplicationTrackerTestCase { }); thread.start(); barrier.await(); - awaitBusy(tracker::pendingInSync); + assertBusy(() -> assertTrue(tracker.pendingInSync())); final long updatedLocalCheckpoint = randomLongBetween(1 + localCheckpoint, Long.MAX_VALUE); // there is a shard copy pending in sync, the global checkpoint can not advance updatedGlobalCheckpoint.set(UNASSIGNED_SEQ_NO); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java index 416e71709902..24809e575356 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java @@ -495,13 +495,14 @@ public class IndexShardOperationPermitsTests extends ESTestCase { * permits to the semaphore. We wait here until all generic threads are idle as an indication that all permits have been returned to * the semaphore. 
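
The recurring change across these test files is a single pattern: `awaitBusy`, which polls a boolean supplier and merely *returns* `false` on timeout, is replaced by `assertBusy`, which retries a block of assertions and fails the test with the last `AssertionError` once time runs out. A minimal sketch of the two idioms, assuming an `ESTestCase` subclass; `queueIsEmpty()` is a hypothetical condition standing in for whatever each test actually polls:

    public void testQueueDrains() throws Exception {
        // Before: awaitBusy polls the supplier and returns false on timeout;
        // forgetting to assert on the result silently masks a failure.
        // boolean drained = awaitBusy(() -> queueIsEmpty(), 10, TimeUnit.SECONDS);
        // assertTrue("queue never drained", drained);

        // After: the assertion block is retried until it passes or the timeout
        // elapses, and the last AssertionError becomes the test failure.
        assertBusy(() -> assertTrue("queue never drained", queueIsEmpty()), 10, TimeUnit.SECONDS);
    }

Because `assertBusy` is declared to throw `Exception`, many of the converted test methods also widen their `throws` clauses (for example from `throws InterruptedException` to `throws Exception`). Where a raw boolean result really is wanted, for logging rather than failing, the tests switch to `waitUntil` instead, as `CorruptedFileIT` does below.
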
*/ - awaitBusy(() -> { + assertBusy(() -> { for (final ThreadPoolStats.Stats stats : threadPool.stats()) { if (ThreadPool.Names.GENERIC.equals(stats.getName())) { - return stats.getActive() == 0; + assertThat("Expected no active threads in GENERIC pool", stats.getActive(), equalTo(0)); + return; } } - return false; + fail("Failed to find stats for the GENERIC thread pool"); }); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 811e27ee8848..90ec2ece394e 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -3356,7 +3356,7 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(primary); } - public void testScheduledRefresh() throws IOException, InterruptedException { + public void testScheduledRefresh() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) @@ -3381,7 +3381,7 @@ public class IndexShardTests extends IndexShardTestCase { assertFalse(primary.scheduledRefresh()); assertEquals(lastSearchAccess, primary.getLastSearcherAccess()); // wait until the thread-pool has moved the timestamp otherwise we can't assert on this below - awaitBusy(() -> primary.getThreadPool().relativeTimeInMillis() > lastSearchAccess); + assertBusy(() -> assertThat(primary.getThreadPool().relativeTimeInMillis(), greaterThan(lastSearchAccess))); CountDownLatch latch = new CountDownLatch(10); for (int i = 0; i < 10; i++) { primary.awaitShardSearchActive(refreshed -> { diff --git a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 3ca29b6b375b..74d72edf71f3 100644 --- a/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -53,6 +53,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -281,17 +282,20 @@ public class CorruptedFileIT extends ESIntegTestCase { client().admin().indices().prepareUpdateSettings("test").setSettings(build).get(); client().admin().cluster().prepareReroute().get(); - boolean didClusterTurnRed = awaitBusy(() -> { + boolean didClusterTurnRed = waitUntil(() -> { ClusterHealthStatus test = client().admin().cluster() .health(Requests.clusterHealthRequest("test")).actionGet().getStatus(); return test == ClusterHealthStatus.RED; }, 5, TimeUnit.MINUTES);// sometimes on slow nodes the replication / recovery is just dead slow + final ClusterHealthResponse response = client().admin().cluster() .health(Requests.clusterHealthRequest("test")).get(); + if (response.getStatus() != ClusterHealthStatus.RED) { logger.info("Cluster turned red in busy loop: {}", didClusterTurnRed); logger.info("cluster state:\n{}\n{}", - client().admin().cluster().prepareState().get().getState(), client().admin().cluster().preparePendingClusterTasks().get()); + 
client().admin().cluster().prepareState().get().getState(), + client().admin().cluster().preparePendingClusterTasks().get()); } assertThat(response.getStatus(), is(ClusterHealthStatus.RED)); ClusterState state = client().admin().cluster().prepareState().get().getState(); @@ -603,7 +607,7 @@ public class CorruptedFileIT extends ESIntegTestCase { Settings.builder().putNull(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey()) )); - ensureGreen(); + ensureGreen(TimeValue.timeValueSeconds(60)); } private int numShards(String... index) { diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 88da2b349f71..c37a1d90b84d 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -1029,38 +1029,6 @@ public class StoreTests extends ESTestCase { store.close(); } - public void testEnsureIndexHasHistoryUUID() throws IOException { - final ShardId shardId = new ShardId("index", "_na_", 1); - try (Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId))) { - - store.createEmpty(Version.LATEST); - - // remove the history uuid - IndexWriterConfig iwc = new IndexWriterConfig(null) - .setCommitOnClose(false) - // we don't want merges to happen here - we call maybe merge on the engine - // later once we stared it up otherwise we would need to wait for it here - // we also don't specify a codec here and merges should use the engines for this index - .setMergePolicy(NoMergePolicy.INSTANCE) - .setOpenMode(IndexWriterConfig.OpenMode.APPEND); - try (IndexWriter writer = new IndexWriter(store.directory(), iwc)) { - Map newCommitData = new HashMap<>(); - for (Map.Entry entry : writer.getLiveCommitData()) { - if (entry.getKey().equals(Engine.HISTORY_UUID_KEY) == false) { - newCommitData.put(entry.getKey(), entry.getValue()); - } - } - writer.setLiveCommitData(newCommitData.entrySet()); - writer.commit(); - } - - store.ensureIndexHasHistoryUUID(); - - SegmentInfos segmentInfos = Lucene.readSegmentInfos(store.directory()); - assertThat(segmentInfos.getUserData(), hasKey(Engine.HISTORY_UUID_KEY)); - } - } - public void testHistoryUUIDCanBeForced() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); try (Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId))) { diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java b/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java index 0d64ca05d209..a6bb2615ae29 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -50,7 +51,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BooleanSupplier; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; @@ -218,25 +218,23 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase { private static void assertShardStatesMatch(final IndexShardStateChangeListener stateChangeListener, final int numShards, final IndexShardState... shardStates) - throws InterruptedException { + throws Exception { + CheckedRunnable waitPredicate = () -> { + assertEquals(stateChangeListener.shardStates.size(), numShards); - BooleanSupplier waitPredicate = () -> { - if (stateChangeListener.shardStates.size() != numShards) { - return false; - } for (List indexShardStates : stateChangeListener.shardStates.values()) { - if (indexShardStates == null || indexShardStates.size() != shardStates.length) { - return false; - } + assertNotNull(indexShardStates); + assertThat(indexShardStates.size(), equalTo(shardStates.length)); + for (int i = 0; i < shardStates.length; i++) { - if (indexShardStates.get(i) != shardStates[i]) { - return false; - } + assertThat(indexShardStates.get(i), equalTo(shardStates[i])); } } - return true; }; - if (!awaitBusy(waitPredicate, 1, TimeUnit.MINUTES)) { + + try { + assertBusy(waitPredicate, 1, TimeUnit.MINUTES); + } catch (AssertionError ae) { fail("failed to observe expect shard states\n" + "expected: [" + numShards + "] shards with states: " + Strings.arrayToCommaDelimitedString(shardStates) + "\n" + "observed:\n" + stateChangeListener); diff --git a/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java b/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java index 68e08e1e57aa..14cc8316a520 100644 --- a/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java +++ b/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java @@ -30,42 +30,36 @@ import static org.hamcrest.Matchers.containsString; public class TermsLookupTests extends ESTestCase { public void testTermsLookup() { String index = randomAlphaOfLengthBetween(1, 10); - String type = randomAlphaOfLengthBetween(1, 10); String id = randomAlphaOfLengthBetween(1, 10); String path = randomAlphaOfLengthBetween(1, 10); String routing = randomAlphaOfLengthBetween(1, 10); - TermsLookup termsLookup = new TermsLookup(index, type, id, path); + TermsLookup termsLookup = new TermsLookup(index, id, path); termsLookup.routing(routing); assertEquals(index, termsLookup.index()); - assertEquals(type, termsLookup.type()); assertEquals(id, termsLookup.id()); assertEquals(path, termsLookup.path()); assertEquals(routing, termsLookup.routing()); } public void testIllegalArguments() { - String type = randomAlphaOfLength(5); String id = randomAlphaOfLength(5); String path = randomAlphaOfLength(5); String index = randomAlphaOfLength(5); - switch (randomIntBetween(0, 3)) { + switch (randomIntBetween(0, 2)) { case 0: - type = null; - break; - case 1: id = null; break; - case 2: + case 1: path = null; break; - case 3: + case 2: index = null; break; default: fail("unknown case"); } try { - new TermsLookup(index, type, id, path); + new TermsLookup(index, id, path); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("[terms] query lookup element requires specifying")); } @@ -84,19 +78,6 @@ public class TermsLookupTests extends ESTestCase { } } - public void testSerializationWithTypes() throws IOException { - 
TermsLookup termsLookup = randomTermsLookupWithTypes(); - try (BytesStreamOutput output = new BytesStreamOutput()) { - termsLookup.writeTo(output); - try (StreamInput in = output.bytes().streamInput()) { - TermsLookup deserializedLookup = new TermsLookup(in); - assertEquals(deserializedLookup, termsLookup); - assertEquals(deserializedLookup.hashCode(), termsLookup.hashCode()); - assertNotSame(deserializedLookup, termsLookup); - } - } - } - public static TermsLookup randomTermsLookup() { return new TermsLookup( randomAlphaOfLength(10), @@ -105,12 +86,4 @@ public class TermsLookupTests extends ESTestCase { ).routing(randomBoolean() ? randomAlphaOfLength(10) : null); } - public static TermsLookup randomTermsLookupWithTypes() { - return new TermsLookup( - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomAlphaOfLength(10).replace('.', '_') - ).routing(randomBoolean() ? randomAlphaOfLength(10) : null); - } } diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index f6f057fb7dda..2390bae14a36 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -132,9 +132,10 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.oneOf; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class IndexRecoveryIT extends ESIntegTestCase { @@ -789,9 +790,11 @@ public class IndexRecoveryIT extends ESIntegTestCase { if (PeerRecoverySourceService.Actions.START_RECOVERY.equals(action) && count.incrementAndGet() == 1) { // ensures that it's considered as valid recovery attempt by source try { - awaitBusy(() -> client(blueNodeName).admin().cluster().prepareState().setLocal(true).get() - .getState().getRoutingTable().index("test").shard(0).getAllInitializingShards().isEmpty() == false); - } catch (InterruptedException e) { + assertBusy(() -> assertThat( + "Expected there to be some initializing shards", + client(blueNodeName).admin().cluster().prepareState().setLocal(true).get() + .getState().getRoutingTable().index("test").shard(0).getAllInitializingShards(), not(empty()))); + } catch (Exception e) { throw new RuntimeException(e); } connection.sendRequest(requestId, action, request, options); @@ -1473,7 +1476,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { try { IndexResponse response = client().prepareIndex(indexName, "_doc") .setSource(Map.of("f" + randomIntBetween(1, 10), randomNonNegativeLong()), XContentType.JSON).get(); - assertThat(response.getResult(), isOneOf(CREATED, UPDATED)); + assertThat(response.getResult(), is(oneOf(CREATED, UPDATED))); } catch (ElasticsearchException ignored) { } } diff --git a/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index f388e49b31fb..9ae87b683500 100644 --- a/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ 
b/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -145,8 +145,8 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { .get(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(waitForShardDeletion(node_1, index, 0), equalTo(false)); - assertThat(waitForIndexDeletion(node_1, index), equalTo(false)); + assertShardDeleted(node_1, index, 0); + assertIndexDeleted(node_1, index); assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(true)); assertThat(Files.exists(indexDirectory(node_2, index)), equalTo(true)); assertThat(Files.exists(shardDirectory(node_3, index, 0)), equalTo(true)); @@ -240,12 +240,13 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { // it must still delete the shard, even if it cannot find it anymore in indicesservice client().admin().indices().prepareDelete("test").get(); - assertThat(waitForShardDeletion(node_1, index, 0), equalTo(false)); - assertThat(waitForIndexDeletion(node_1, index), equalTo(false)); + assertShardDeleted(node_1, index, 0); + assertIndexDeleted(node_1, index); assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(false)); assertThat(Files.exists(indexDirectory(node_1, index)), equalTo(false)); - assertThat(waitForShardDeletion(node_2, index, 0), equalTo(false)); - assertThat(waitForIndexDeletion(node_2, index), equalTo(false)); + + assertShardDeleted(node_2, index, 0); + assertIndexDeleted(node_2, index); assertThat(Files.exists(shardDirectory(node_2, index, 0)), equalTo(false)); assertThat(Files.exists(indexDirectory(node_2, index)), equalTo(false)); } @@ -277,7 +278,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertThat(clusterHealth.isTimedOut(), equalTo(false)); logger.info("--> making sure that shard is not allocated on server3"); - assertThat(waitForShardDeletion(node_3, index, 0), equalTo(false)); + assertShardDeleted(node_3, index, 0); Path server2Shard = shardDirectory(node_2, index, 0); logger.info("--> stopping node {}", node_2); @@ -308,7 +309,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { logger.info("--> making sure that shard and its replica are allocated on server1 and server3 but not on server2"); assertThat(Files.exists(shardDirectory(node_1, index, 0)), equalTo(true)); assertThat(Files.exists(shardDirectory(node_3, index, 0)), equalTo(true)); - assertThat(waitForShardDeletion(node_4, index, 0), equalTo(false)); + assertShardDeleted(node_4, index, 0); } public void testShardActiveElsewhereDoesNotDeleteAnother() throws Exception { @@ -453,7 +454,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { waitNoPendingTasksOnAll(); logger.info("Checking if shards aren't removed"); for (int shard : node2Shards) { - assertTrue(waitForShardDeletion(nonMasterNode, index, shard)); + assertShardExists(nonMasterNode, index, shard); } } @@ -471,13 +472,18 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { return paths[0]; } - private boolean waitForShardDeletion(final String server, final Index index, final int shard) throws InterruptedException { - awaitBusy(() -> !Files.exists(shardDirectory(server, index, shard))); - return Files.exists(shardDirectory(server, index, shard)); + private void assertShardDeleted(final String server, final Index index, final int shard) throws Exception { + final Path path = shardDirectory(server, index, shard); + assertBusy(() -> assertFalse("Expected shard to not exist: " + path, Files.exists(path))); } - private boolean 
waitForIndexDeletion(final String server, final Index index) throws InterruptedException { - awaitBusy(() -> !Files.exists(indexDirectory(server, index))); - return Files.exists(indexDirectory(server, index)); + private void assertShardExists(final String server, final Index index, final int shard) throws Exception { + final Path path = shardDirectory(server, index, shard); + assertBusy(() -> assertTrue("Expected shard to exist: " + path, Files.exists(path))); + } + + private void assertIndexDeleted(final String server, final Index index) throws Exception { + final Path path = indexDirectory(server, index); + assertBusy(() -> assertFalse("Expected index to be deleted: " + path, Files.exists(path))); } } diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index 6f3ac3be58e2..be51847e9bb1 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -74,7 +74,7 @@ import java.util.concurrent.atomic.AtomicInteger; import static java.util.Objects.requireNonNull; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.test.ESTestCase.awaitBusy; +import static org.elasticsearch.test.ESTestCase.assertBusy; import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -330,10 +330,15 @@ public class TestPersistentTasksPlugin extends Plugin implements ActionPlugin, P AtomicInteger phase = new AtomicInteger(); while (true) { // wait for something to happen - assertTrue(awaitBusy(() -> testTask.isCancelled() || - testTask.getOperation() != null || - clusterService.lifecycleState() != Lifecycle.State.STARTED, // speedup finishing on closed nodes - 45, TimeUnit.SECONDS)); // This can take a while during large cluster restart + try { + assertBusy(() -> assertTrue(testTask.isCancelled() || + testTask.getOperation() != null || + clusterService.lifecycleState() != Lifecycle.State.STARTED), // speedup finishing on closed nodes + 45, TimeUnit.SECONDS); // This can take a while during large cluster restart + } catch (Exception ex) { + throw new RuntimeException(ex); + } + if (clusterService.lifecycleState() != Lifecycle.State.STARTED) { return; } diff --git a/server/src/test/java/org/elasticsearch/plugins/PlatformsTests.java b/server/src/test/java/org/elasticsearch/plugins/PlatformsTests.java new file mode 100644 index 000000000000..b23475a8c4c7 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/plugins/PlatformsTests.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.plugins; + +import org.apache.lucene.util.Constants; +import org.elasticsearch.test.ESTestCase; + +import java.nio.file.Path; + +public class PlatformsTests extends ESTestCase { + + public void testNativeControllerPath() { + + final Path nativeControllerPath = Platforms.nativeControllerPath(createTempDir()); + + // The directory structure on macOS must match Apple's .app + // structure or Gatekeeper may refuse to run the program + if (Constants.MAC_OS_X) { + String programName = nativeControllerPath.getFileName().toString(); + Path binDirectory = nativeControllerPath.getParent(); + assertEquals("MacOS", binDirectory.getFileName().toString()); + Path contentsDirectory = binDirectory.getParent(); + assertEquals("Contents", contentsDirectory.getFileName().toString()); + Path appDirectory = contentsDirectory.getParent(); + assertEquals(programName + ".app", appDirectory.getFileName().toString()); + } else { + Path binDirectory = nativeControllerPath.getParent(); + assertEquals("bin", binDirectory.getFileName().toString()); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 88e88ee17388..88bd59fe3500 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -362,23 +362,23 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { //if there was an error we try to wait and see if at some point it'll get fixed logger.info("--> trying to wait"); - assertTrue(awaitBusy(() -> { - boolean errorOccurred = false; - for (int i = 0; i < iterations; i++) { - SearchResponse searchResponse = client().prepareSearch() - .setTrackTotalHits(true) - .setSize(0) - .setQuery(matchAllQuery()) - .get(); - if (searchResponse.getHits().getTotalHits().value != numberOfDocs) { - errorOccurred = true; - } - } - return !errorOccurred; - }, - 5, - TimeUnit.MINUTES - ) + assertBusy( + () -> { + boolean errorOccurred = false; + for (int i = 0; i < iterations; i++) { + SearchResponse searchResponse = client().prepareSearch() + .setTrackTotalHits(true) + .setSize(0) + .setQuery(matchAllQuery()) + .get(); + if (searchResponse.getHits().getTotalHits().value != numberOfDocs) { + errorOccurred = true; + } + } + assertFalse("An error occurred while waiting", errorOccurred); + }, + 5, + TimeUnit.MINUTES ); assertEquals(numberOfDocs, ids.size()); } diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index 7a1bcefea9d9..6004e7fff452 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -202,7 +202,7 @@ public class RepositoriesServiceTests extends ESTestCase { @Override public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, IndexCommit - snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { + snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { } diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java 
b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index a904879321d5..b2f21e89a7d6 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -159,7 +159,8 @@ public class BlobStoreRepositoryRestoreTests extends IndexShardTestCase { // snapshot the shard final Repository repository = createRepository(); final Snapshot snapshot = new Snapshot(repository.getMetadata().name(), new SnapshotId(randomAlphaOfLength(10), "_uuid")); - snapshotShard(shard, snapshot, repository); + final String shardGen = snapshotShard(shard, snapshot, repository); + assertNotNull(shardGen); final Snapshot snapshotWithSameName = new Snapshot(repository.getMetadata().name(), new SnapshotId( snapshot.getSnapshotId().getName(), "_uuid2")); IndexShardSnapshotFailedException isfe = expectThrows(IndexShardSnapshotFailedException.class, diff --git a/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java index 6c48a19cbb5e..3c2d59564dea 100644 --- a/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/fs/FsRepositoryTests.java @@ -100,15 +100,16 @@ public class FsRepositoryTests extends ESTestCase { IndexId indexId = new IndexId(idxSettings.getIndex().getName(), idxSettings.getUUID()); IndexCommit indexCommit = Lucene.getIndexCommit(Lucene.readSegmentInfos(store.directory()), store.directory()); - final PlainActionFuture future1 = PlainActionFuture.newFuture(); + final PlainActionFuture future1 = PlainActionFuture.newFuture(); runGeneric(threadPool, () -> { - IndexShardSnapshotStatus snapshotStatus = IndexShardSnapshotStatus.newInitializing(); + IndexShardSnapshotStatus snapshotStatus = IndexShardSnapshotStatus.newInitializing(null); repository.snapshotShard(store, null, snapshotId, indexId, indexCommit, snapshotStatus, future1); future1.actionGet(); IndexShardSnapshotStatus.Copy copy = snapshotStatus.asCopy(); assertEquals(copy.getTotalFileCount(), copy.getIncrementalFileCount()); }); + final String shardGeneration = future1.actionGet(); Lucene.cleanLuceneIndex(directory); expectThrows(org.apache.lucene.index.IndexNotFoundException.class, () -> Lucene.readSegmentInfos(directory)); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); @@ -127,9 +128,9 @@ public class FsRepositoryTests extends ESTestCase { SnapshotId incSnapshotId = new SnapshotId("test1", "test1"); IndexCommit incIndexCommit = Lucene.getIndexCommit(Lucene.readSegmentInfos(store.directory()), store.directory()); Collection commitFileNames = incIndexCommit.getFileNames(); - final PlainActionFuture future2 = PlainActionFuture.newFuture(); + final PlainActionFuture future2 = PlainActionFuture.newFuture(); runGeneric(threadPool, () -> { - IndexShardSnapshotStatus snapshotStatus = IndexShardSnapshotStatus.newInitializing(); + IndexShardSnapshotStatus snapshotStatus = IndexShardSnapshotStatus.newInitializing(shardGeneration); repository.snapshotShard(store, null, incSnapshotId, indexId, incIndexCommit, snapshotStatus, future2); future2.actionGet(); IndexShardSnapshotStatus.Copy copy = snapshotStatus.asCopy(); diff --git 
a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java deleted file mode 100644 index fab3f2a8a7ea..000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingActionTests.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.rest.action.admin.indices; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.rest.FakeRestChannel; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - -public class RestGetFieldMappingActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestGetFieldMappingAction(controller()); - } - - public void testIncludeTypeName() { - Map params = new HashMap<>(); - String path; - if (randomBoolean()) { - params.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - path = "some_index/some_type/_mapping/field/some_field"; - } else { - params.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - path = "some_index/_mapping/field/some_field"; - } - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath(path) - .withParams(params) - .build(); - dispatchRequest(deprecatedRequest); - assertWarnings(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("some_index/_mapping/field/some_field") - .build(); - dispatchRequest(validRequest); - } - - public void testTypeInPath() { - // Test that specifying a type while setting include_type_name to false - // results in an illegal argument exception. 
- Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("some_index/some_type/_mapping/field/some_field") - .withParams(params) - .build(); - - FakeRestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller().dispatchRequest(request, channel, threadContext); - - assertEquals(1, channel.errors().get()); - assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status()); - } -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java deleted file mode 100644 index 624491b2e42d..000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingActionTests.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.rest.action.admin.indices; - -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.rest.FakeRestChannel; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; - -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestGetMappingActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestGetMappingAction(controller()); - } - - public void testTypeExistsDeprecation() throws Exception { - Map params = new HashMap<>(); - params.put("type", "_doc"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.HEAD) - .withParams(params) - .build(); - - RestGetMappingAction handler = new RestGetMappingAction(mock(RestController.class)); - handler.prepareRequest(request, mock(NodeClient.class)); - - assertWarnings("Type exists requests are deprecated, as types have been deprecated."); - } - - public void testTypeInPath() { - // Test that specifying a type while setting include_type_name to false - // results in an illegal argument exception. 
- Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("some_index/some_type/_mapping/some_field") - .withParams(params) - .build(); - - FakeRestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller().dispatchRequest(request, channel, threadContext); - - assertEquals(1, channel.errors().get()); - assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status()); - } - - /** - * Setting "include_type_name" to true or false should cause a deprecation warning starting in 7.0 - */ - public void testTypeUrlParameterDeprecation() throws Exception { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, Boolean.toString(randomBoolean())); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withParams(params) - .withPath("/some_index/_mappings") - .build(); - - FakeRestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller().dispatchRequest(request, channel, threadContext); - - assertWarnings(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE); - } - -} diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java index e1e66b503e79..bd7fb60b3d56 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -18,19 +18,17 @@ */ package org.elasticsearch.rest.action.admin.indices; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.search.AbstractSearchTestCase; @@ -86,7 +84,7 @@ public class RestValidateQueryActionTests extends AbstractSearchTestCase { } @AfterClass - public static void terminateThreadPool() throws InterruptedException { + public static void terminateThreadPool() { terminate(threadPool); threadPool = null; @@ -153,32 +151,4 @@ public class RestValidateQueryActionTests extends AbstractSearchTestCase { .build(); } - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_validate/query") - .build(); - - performRequest(request); - assertWarnings(RestValidateQueryAction.TYPES_DEPRECATION_MESSAGE); - } - - public void testTypeParameter() { - Map params = new 
HashMap<>(); - params.put("type", "some_type"); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("_validate/query") - .withParams(params) - .build(); - - performRequest(request); - assertWarnings(RestValidateQueryAction.TYPES_DEPRECATION_MESSAGE); - } - - private void performRequest(RestRequest request) { - RestChannel channel = new FakeRestChannel(request, false, 1); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - controller.dispatchRequest(request, channel, threadContext); - } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java deleted file mode 100644 index c2f5acd1e1fa..000000000000 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestExplainActionTests.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.rest.action.search; - -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; - -public class RestExplainActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestExplainAction(controller()); - } - - public void testTypeInPath() { - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/some_id/_explain") - .build(); - dispatchRequest(deprecatedRequest); - assertWarnings(RestExplainAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/_explain/some_id") - .build(); - dispatchRequest(validRequest); - } -} diff --git a/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java b/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java index 41a72d1a0994..9811c8b4a562 100644 --- a/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/server/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -568,7 +568,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { } logger.info("--> verifying explain with id [2], with routing [0], should succeed"); - ExplainResponse explainResponse = client().prepareExplain(indexOrAlias(), "type1", "2") + ExplainResponse explainResponse = client().prepareExplain(indexOrAlias(), "2") .setQuery(QueryBuilders.matchAllQuery()) .setRouting(routingValue).get(); assertThat(explainResponse.isExists(), equalTo(true)); @@ -576,7 +576,7 @@ public class SimpleRoutingIT extends ESIntegTestCase { 
logger.info("--> verifying explain with id [2], with no routing, should fail"); try { - client().prepareExplain(indexOrAlias(), "type1", "2") + client().prepareExplain(indexOrAlias(), "2") .setQuery(QueryBuilders.matchAllQuery()).get(); fail(); } catch (RoutingMissingException e) { diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationIT.java index 81bd844c8fbf..6da30714c750 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationIT.java @@ -272,7 +272,7 @@ public class SearchCancellationIT extends ESIntegTestCase { LogManager.getLogger(SearchCancellationIT.class).info("Blocking on the document {}", fieldsLookup.get("_id")); hits.incrementAndGet(); try { - awaitBusy(() -> shouldBlock.get() == false); + assertBusy(() -> assertFalse(shouldBlock.get())); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index bbd5b12ec45e..5d5d26f48967 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -68,7 +67,6 @@ public class SearchHitTests extends AbstractWireSerializingTestCase { public static SearchHit createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization) { int internalId = randomInt(); String uid = randomAlphaOfLength(10); - Text type = new Text(randomAlphaOfLengthBetween(5, 10)); NestedIdentity nestedIdentity = null; if (randomBoolean()) { nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2)); @@ -77,7 +75,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase { if (randomBoolean()) { fields = GetResultTests.randomDocumentFields(xContentType).v2(); } - SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, fields); + SearchHit hit = new SearchHit(internalId, uid, nestedIdentity, fields); if (frequently()) { if (rarely()) { hit.score(Float.NaN); @@ -205,16 +203,15 @@ public class SearchHitTests extends AbstractWireSerializingTestCase { } assertEquals("my_index", parsed.getIndex()); assertEquals(1, parsed.getScore(), Float.MIN_VALUE); - assertNull(parsed.getType()); assertNull(parsed.getId()); } public void testToXContent() throws IOException { - SearchHit searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()); + SearchHit searchHit = new SearchHit(1, "id1", Collections.emptyMap()); searchHit.score(1.5f); XContentBuilder builder = JsonXContent.contentBuilder(); searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", Strings.toString(builder)); + assertEquals("{\"_id\":\"id1\",\"_score\":1.5}", Strings.toString(builder)); } public void testSerializeShardTarget() throws Exception { @@ -223,25 +220,25 @@ public class SearchHitTests 
extends AbstractWireSerializingTestCase { clusterAlias, OriginalIndices.NONE); Map innerHits = new HashMap<>(); - SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null); + SearchHit innerHit1 = new SearchHit(0, "_id", null); innerHit1.shard(target); - SearchHit innerInnerHit2 = new SearchHit(0, "_id", new Text("_type"), null); + SearchHit innerInnerHit2 = new SearchHit(0, "_id", null); innerInnerHit2.shard(target); innerHits.put("1", new SearchHits(new SearchHit[]{innerInnerHit2}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); innerHit1.setInnerHits(innerHits); - SearchHit innerHit2 = new SearchHit(0, "_id", new Text("_type"), null); + SearchHit innerHit2 = new SearchHit(0, "_id", null); innerHit2.shard(target); - SearchHit innerHit3 = new SearchHit(0, "_id", new Text("_type"), null); + SearchHit innerHit3 = new SearchHit(0, "_id", null); innerHit3.shard(target); innerHits = new HashMap<>(); - SearchHit hit1 = new SearchHit(0, "_id", new Text("_type"), null); + SearchHit hit1 = new SearchHit(0, "_id", null); innerHits.put("1", new SearchHits(new SearchHit[]{innerHit1, innerHit2}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); innerHits.put("2", new SearchHits(new SearchHit[]{innerHit3}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); hit1.shard(target); hit1.setInnerHits(innerHits); - SearchHit hit2 = new SearchHit(0, "_id", new Text("_type"), null); + SearchHit hit2 = new SearchHit(0, "_id", null); hit2.shard(target); SearchHits hits = new SearchHits(new SearchHit[]{hit1, hit2}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); @@ -268,7 +265,7 @@ public class SearchHitTests extends AbstractWireSerializingTestCase { } public void testNullSource() { - SearchHit searchHit = new SearchHit(0, "_id", new Text("_type"), null); + SearchHit searchHit = new SearchHit(0, "_id", null); assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceRef(), nullValue()); @@ -290,7 +287,6 @@ public class SearchHitTests extends AbstractWireSerializingTestCase { { XContentParser parser = createParser(XContentType.JSON.xContent(), "{\n" + " \"_index\": \"twitter\",\n" + - " \"_type\": \"tweet\",\n" + " \"_id\": \"1\",\n" + " \"_score\": 1.0,\n" + " \"fields\": {\n" + @@ -308,7 +304,6 @@ public class SearchHitTests extends AbstractWireSerializingTestCase { { XContentParser parser = createParser(XContentType.JSON.xContent(), "{\n" + " \"_index\": \"twitter\",\n" + - " \"_type\": \"tweet\",\n" + " \"_id\": \"1\",\n" + " \"_score\": 1.0,\n" + " \"fields\": {\n" + @@ -330,7 +325,6 @@ public class SearchHitTests extends AbstractWireSerializingTestCase { { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\n" + " \"_index\": \"twitter\",\n" + - " \"_type\": \"tweet\",\n" + " \"_id\": \"1\",\n" + " \"_score\": 1.0,\n" + " \"fields\": {\n" + diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index c34a2d23d6e2..9d9a6713bbb0 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.LuceneTests; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.ToXContent; 
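
With mapping types removed, `SearchHit` in these tests no longer carries a `_type`: the `Text type` constructor argument is dropped and the field disappears from the rendered JSON, which is what the updated `testToXContent` expectations encode. The new shape in brief, with illustrative values:

    SearchHit hit = new SearchHit(1, "id1", Collections.emptyMap());
    hit.score(1.5f);
    // Renders as {"_id":"id1","_score":1.5}; no "_type" key.
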
import org.elasticsearch.common.xcontent.XContentBuilder; @@ -204,8 +203,8 @@ public class SearchHitsTests extends AbstractSerializingTestCase { public void testToXContent() throws IOException { SearchHit[] hits = new SearchHit[] { - new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()), - new SearchHit(2, "id2", new Text("type"), Collections.emptyMap()) }; + new SearchHit(1, "id1", Collections.emptyMap()), + new SearchHit(2, "id2", Collections.emptyMap()) }; long totalHits = 1000; float maxScore = 1.5f; @@ -215,16 +214,16 @@ public class SearchHitsTests extends AbstractSerializingTestCase { searchHits.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); assertEquals("{\"hits\":{\"total\":{\"value\":1000,\"relation\":\"eq\"},\"max_score\":1.5," + - "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":null},"+ - "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":null}]}}", Strings.toString(builder)); + "\"hits\":[{\"_id\":\"id1\",\"_score\":null},"+ + "{\"_id\":\"id2\",\"_score\":null}]}}", Strings.toString(builder)); } public void testFromXContentWithShards() throws IOException { for (boolean withExplanation : new boolean[] {true, false}) { final SearchHit[] hits = new SearchHit[]{ - new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()), - new SearchHit(2, "id2", new Text("type"), Collections.emptyMap()), - new SearchHit(10, "id10", new Text("type"), Collections.emptyMap()) + new SearchHit(1, "id1", Collections.emptyMap()), + new SearchHit(2, "id2", Collections.emptyMap()), + new SearchHit(10, "id10", Collections.emptyMap()) }; for (SearchHit hit : hits) { diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index dc3f91f4502a..a4526d979da6 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -73,8 +73,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -271,7 +270,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { PlainActionFuture result = new PlainActionFuture<>(); final boolean useScroll = randomBoolean(); service.executeQueryPhase( - new ShardSearchLocalRequest(useScroll ? scrollSearchRequest : searchRequest, + new ShardSearchRequest(OriginalIndices.NONE, useScroll ? 
scrollSearchRequest : searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null), new SearchTask(123L, "", "", "", null, Collections.emptyMap()), result); @@ -319,7 +318,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase { final IndexShard indexShard = indexService.getShard(0); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); final SearchContext contextWithDefaultTimeout = service.createContext( - new ShardSearchLocalRequest( + new ShardSearchRequest( + OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, @@ -337,7 +337,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase { final long seconds = randomIntBetween(6, 10); searchRequest.source(new SearchSourceBuilder().timeout(TimeValue.timeValueSeconds(seconds))); final SearchContext context = service.createContext( - new ShardSearchLocalRequest( + new ShardSearchRequest( + OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, @@ -371,12 +372,14 @@ public class SearchServiceTests extends ESSingleNodeTestCase { for (int i = 0; i < indexService.getIndexSettings().getMaxDocvalueFields(); i++) { searchSourceBuilder.docValueField("field" + i); } - try (SearchContext context = service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))) { + try (SearchContext context = service.createContext( + new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, + new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null)) + ) { assertNotNull(context); searchSourceBuilder.docValueField("one_field_too_much"); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + () -> service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))); assertEquals( "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [100] but was [101]. " @@ -404,13 +407,14 @@ public class SearchServiceTests extends ESSingleNodeTestCase { searchSourceBuilder.scriptField("field" + i, new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())); } - try (SearchContext context = service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))) { + try (SearchContext context = service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, + indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), + 1.0f, -1, null, null))) { assertNotNull(context); searchSourceBuilder.scriptField("anotherScriptField", new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, - () -> service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + () -> service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))); assertEquals( "Trying to retrieve too many script_fields. 
Must be less than or equal to: [" + maxScriptFields + "] but was [" @@ -433,8 +437,9 @@ public class SearchServiceTests extends ESSingleNodeTestCase { searchSourceBuilder.scriptField("field" + 0, new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap())); searchSourceBuilder.size(0); - try (SearchContext context = service.createContext(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, - new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null))) { + try (SearchContext context = service.createContext(new ShardSearchRequest(OriginalIndices.NONE, + searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), + 1.0f, -1, null, null))) { assertEquals(0, context.scriptFields().fields().size()); } } @@ -532,11 +537,11 @@ public class SearchServiceTests extends ESSingleNodeTestCase { } } - private static class ShardScrollRequestTest extends ShardSearchLocalRequest { + private static class ShardScrollRequestTest extends ShardSearchRequest { private Scroll scroll; ShardScrollRequestTest(ShardId shardId) { - super(new SearchRequest().allowPartialSearchResults(true), + super(OriginalIndices.NONE, new SearchRequest().allowPartialSearchResults(true), shardId, 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null); this.scroll = new Scroll(TimeValue.timeValueMinutes(1)); } @@ -554,28 +559,28 @@ public class SearchServiceTests extends ESSingleNodeTestCase { final IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index")); final IndexShard indexShard = indexService.getShard(0); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); searchRequest.source(new SearchSourceBuilder()); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); searchRequest.source(new SearchSourceBuilder().query(new MatchAllQueryBuilder())); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); searchRequest.source(new SearchSourceBuilder().query(new MatchNoneQueryBuilder()) .aggregation(new TermsAggregationBuilder("test", ValueType.STRING).minDocCount(0))); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); searchRequest.source(new SearchSourceBuilder().query(new MatchNoneQueryBuilder()) .aggregation(new GlobalAggregationBuilder("test"))); - assertTrue(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertTrue(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); 
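The hunks above collapse the old `ShardSearchLocalRequest` / `ShardSearchTransportRequest` pair into the single concrete `ShardSearchRequest`, so every local call site now passes `OriginalIndices.NONE` explicitly. As a rough sketch of the construction pattern these tests repeat (the `ShardSearchRequests` helper and its `local` method are hypothetical names; the nine-argument constructor is the one used in the hunks above):

    import org.elasticsearch.action.OriginalIndices;
    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.common.Strings;
    import org.elasticsearch.index.shard.ShardId;
    import org.elasticsearch.search.internal.AliasFilter;
    import org.elasticsearch.search.internal.ShardSearchRequest;

    final class ShardSearchRequests {
        private ShardSearchRequests() {}

        // Builds a single-shard request with the defaults these tests repeat:
        // no alias filtering, neutral index boost, nowInMillis left unset (-1),
        // and no cluster alias or index routings.
        static ShardSearchRequest local(SearchRequest searchRequest, ShardId shardId) {
            return new ShardSearchRequest(
                OriginalIndices.NONE,                       // not a cross-cluster call
                searchRequest,
                shardId,
                1,                                          // numberOfShards
                new AliasFilter(null, Strings.EMPTY_ARRAY), // match all aliases
                1.0f,                                       // indexBoost
                -1,                                         // nowInMillis
                null,                                       // clusterAlias
                null);                                      // indexRoutings
        }
    }

Such a helper would cut down the repetition visible in this file, though the diff itself keeps each call site explicit.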
searchRequest.source(new SearchSourceBuilder().query(new MatchNoneQueryBuilder())); - assertFalse(service.canMatch(new ShardSearchLocalRequest(searchRequest, indexShard.shardId(), 1, + assertFalse(service.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); } @@ -624,7 +629,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { iae.getMessage()); assertFalse(service.getIndicesService().indexServiceSafe(index).getIndexSettings().isSearchThrottled()); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false); - ShardSearchLocalRequest req = new ShardSearchLocalRequest(searchRequest, new ShardId(index, 0), 1, + ShardSearchRequest req = new ShardSearchRequest(OriginalIndices.NONE, searchRequest, new ShardId(index, 0), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null); Thread currentThread = Thread.currentThread(); // we still make sure can match is executed on the network thread @@ -666,7 +671,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10); SearchRequest searchRequest = new SearchRequest(); searchRequest.allowPartialSearchResults(randomBoolean()); - ShardSearchTransportRequest request = new ShardSearchTransportRequest(OriginalIndices.NONE, searchRequest, shardId, + ShardSearchRequest request = new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shardId, indexService.numberOfShards(), AliasFilter.EMPTY, 1f, nowInMillis, clusterAlias, Strings.EMPTY_ARRAY); try (DefaultSearchContext searchContext = service.createSearchContext(request, new TimeValue(System.currentTimeMillis()))) { SearchShardTarget searchShardTarget = searchContext.shardTarget(); @@ -694,7 +699,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { NullPointerException e = expectThrows(NullPointerException.class, () -> service.createContext( - new ShardSearchLocalRequest(shardId, 0, null) { + new ShardSearchRequest(shardId, 0, null) { @Override public SearchType searchType() { // induce an artificial NPE diff --git a/server/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java b/server/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java index 817e922da0cd..d831bce704c5 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java +++ b/server/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java @@ -41,7 +41,7 @@ public class SearchWithRejectionsIT extends ESIntegTestCase { .build(); } - public void testOpenContextsAfterRejections() throws InterruptedException { + public void testOpenContextsAfterRejections() throws Exception { createIndex("test"); ensureGreen("test"); final int docs = scaledRandomIntBetween(20, 50); @@ -68,10 +68,8 @@ public class SearchWithRejectionsIT extends ESIntegTestCase { } catch (Exception t) { } } - awaitBusy( - () -> client().admin().indices().prepareStats().get().getTotal().getSearch().getOpenContexts() == 0, - 1, TimeUnit.SECONDS); - indicesStats = client().admin().indices().prepareStats().get(); - assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0L)); + assertBusy( + () -> assertThat(client().admin().indices().prepareStats().get().getTotal().getSearch().getOpenContexts(), equalTo(0L)), + 1, TimeUnit.SECONDS); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index ef678df3dca7..441ed43d244d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -56,7 +56,6 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; @@ -523,16 +522,6 @@ public class RareTermsAggregatorTests extends AggregatorTestCase { return documents; } - - private InternalAggregation buildInternalAggregation(RareTermsAggregationBuilder builder, MappedFieldType fieldType, - IndexSearcher searcher) throws IOException { - AbstractRareTermsAggregator aggregator = createAggregator(builder, searcher, fieldType); - aggregator.preCollection(); - searcher.search(new MatchAllDocsQuery(), aggregator); - aggregator.postCollection(); - return aggregator.buildAggregation(0L); - } - private void testSearchCase(Query query, List dataset, Consumer configure, Consumer verify, ValueType valueType) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 4e58d836683e..c278dc8f4a02 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -205,7 +205,6 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { for (int i = 0; i < totalHits; i++) { SearchHit searchHit = response.getHits().getAt(i); assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), equalTo("high_card_idx")); - assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getType(), equalTo("type")); DocumentField hitField = searchHit.field(NUMBER_FIELD_NAME); assertThat("Hit " + i + " has wrong number of values", hitField.getValues().size(), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 3da9ea7bde2f..b2973b67ce3c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -104,7 +103,7 @@ public class InternalTopHitsTests extends 
InternalAggregationTestCase").postTags("").forceSource(true))), RestStatus.BAD_REQUEST, - containsString("source is forced for fields [field1] but type [type1] has disabled _source")); + containsString("source is forced for fields [field1] but _source is disabled")); SearchSourceBuilder searchSource = SearchSourceBuilder.searchSource().query(termQuery("field1", "quick")) .highlighter(highlight().forceSource(true).field("field1")); assertFailures(client().prepareSearch("test").setSource(searchSource), RestStatus.BAD_REQUEST, - containsString("source is forced for fields [field1] but type [type1] has disabled _source")); + containsString("source is forced for fields [field1] but _source is disabled")); searchSource = SearchSourceBuilder.searchSource().query(termQuery("field1", "quick")) .highlighter(highlight().forceSource(true).field("field*")); assertFailures(client().prepareSearch("test").setSource(searchSource), RestStatus.BAD_REQUEST, - matches("source is forced for fields \\[field\\d, field\\d\\] but type \\[type1\\] has disabled _source")); + matches("source is forced for fields \\[field\\d, field\\d\\] but _source is disabled")); } public void testPlainHighlighter() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java similarity index 94% rename from server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java rename to server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java index f4211ed32efe..c913bbe4b9a3 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java @@ -49,15 +49,15 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { +public class ShardSearchRequestTests extends AbstractSearchTestCase { private IndexMetaData baseMetaData = IndexMetaData.builder("test").settings(Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()) .numberOfShards(1).numberOfReplicas(1).build(); public void testSerialization() throws Exception { - ShardSearchTransportRequest shardSearchTransportRequest = createShardSearchTransportRequest(); - ShardSearchTransportRequest deserializedRequest = - copyWriteable(shardSearchTransportRequest, namedWriteableRegistry, ShardSearchTransportRequest::new); + ShardSearchRequest shardSearchTransportRequest = createShardSearchRequest(); + ShardSearchRequest deserializedRequest = + copyWriteable(shardSearchTransportRequest, namedWriteableRegistry, ShardSearchRequest::new); assertEquals(deserializedRequest.scroll(), shardSearchTransportRequest.scroll()); assertEquals(deserializedRequest.getAliasFilter(), shardSearchTransportRequest.getAliasFilter()); assertArrayEquals(deserializedRequest.indices(), shardSearchTransportRequest.indices()); @@ -77,7 +77,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { assertEquals(shardSearchTransportRequest.allowPartialSearchResults(), deserializedRequest.allowPartialSearchResults()); } - private ShardSearchTransportRequest createShardSearchTransportRequest() throws IOException { + private ShardSearchRequest createShardSearchRequest() throws 
IOException { SearchRequest searchRequest = createSearchRequest(); ShardId shardId = new ShardId(randomAlphaOfLengthBetween(2, 10), randomAlphaOfLengthBetween(2, 10), randomInt()); final AliasFilter filteringAliases; @@ -88,7 +88,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { filteringAliases = new AliasFilter(null, Strings.EMPTY_ARRAY); } final String[] routings = generateRandomStringArray(5, 10, false, true); - return new ShardSearchTransportRequest(new OriginalIndices(searchRequest), searchRequest, shardId, + return new ShardSearchRequest(new OriginalIndices(searchRequest), searchRequest, shardId, randomIntBetween(1, 100), filteringAliases, randomBoolean() ? 1.0f : randomFloat(), Math.abs(randomLong()), randomAlphaOfLengthBetween(3, 10), routings); } diff --git a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java index 114060683219..794b881003de 100644 --- a/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/ExistsIT.java @@ -135,9 +135,8 @@ public class ExistsIT extends ESIntegTestCase { } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { final String index = searchHit.getIndex(); - final String type = searchHit.getType(); final String id = searchHit.getId(); - final ExplainResponse explanation = client().prepareExplain(index, type, id) + final ExplainResponse explanation = client().prepareExplain(index, id) .setQuery(QueryBuilders.existsQuery(fieldName)).get(); logger.info("Explanation for [{}] / [{}] / [{}]: [{}]", fieldName, id, searchHit.getSourceAsString(), explanation.getExplanation()); diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 8cbb47f16575..e289f565ea64 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -982,64 +982,64 @@ public class SearchQueryIT extends ESIntegTestCase { client().prepareIndex("test", "type", "4").setSource("term", "4") ); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "type", "1", "terms"))).get(); + .setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "1", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); // same as above, just on the _id... searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "type", "1", "terms")) + .setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "1", "terms")) ).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); // another search with same parameters... 
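These `TermsLookup` changes track the removal of mapping types: the lookup document is now addressed by index, id, and path alone. A minimal sketch of the new call shape, using the field and index names from the test above (the wrapper class `TermsLookupExample` is hypothetical; the three-argument `TermsLookup` constructor is the one this change introduces):

    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.index.query.TermsQueryBuilder;
    import org.elasticsearch.indices.TermsLookup;

    class TermsLookupExample {
        static TermsQueryBuilder lookupTerms() {
            // Previously: new TermsLookup("lookup", "type", "1", "terms").
            // With types removed, the lookup document is (index, id, path).
            return QueryBuilders.termsLookupQuery(
                "term",                                    // field to filter on
                new TermsLookup("lookup", "1", "terms"));  // index, id, path
        }
    }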
searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "2", "terms"))).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "2", "terms"))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("2")); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "3", "terms"))).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "3", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "2", "4"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "4", "terms"))).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "4", "terms"))).get(); assertHitCount(searchResponse, 0L); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "1", "arr.term"))).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "1", "arr.term"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "2", "arr.term"))).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "2", "arr.term"))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("2")); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "3", "arr.term"))).get(); + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "3", "arr.term"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "2", "4"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))).get(); + .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "3", "arr.term"))).get(); assertHitCount(searchResponse, 0L); - // index "lookup" type "type" id "missing" document does not exist: ignore the lookup terms + // index "lookup" id "missing" document does not exist: ignore the lookup terms searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "type", "missing", "terms"))).get(); + .setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "missing", "terms"))).get(); assertHitCount(searchResponse, 0L); - // index "lookup3" type "type" has the source disabled: ignore the lookup terms + // index "lookup3" has the source disabled: ignore the lookup terms searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term" , new TermsLookup("lookup3", "type", "1", "terms"))).get(); + .setQuery(termsLookupQuery("term" , new TermsLookup("lookup3", "1", "terms"))).get(); assertHitCount(searchResponse, 0L); } diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 806c3dfca676..5a0128bd1cd1 100644 --- 
a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.SearchContextException; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -67,7 +67,7 @@ public class SearchAfterIT extends ESIntegTestCase { } catch (SearchPhaseExecutionException e) { assertTrue(e.shardFailures().length > 0); for (ShardSearchFailure failure : e.shardFailures()) { - assertThat(failure.getCause().getClass(), Matchers.equalTo(SearchContextException.class)); + assertThat(failure.getCause().getClass(), Matchers.equalTo(SearchException.class)); assertThat(failure.getCause().getMessage(), Matchers.equalTo("`search_after` cannot be used in a scroll context.")); } } @@ -83,7 +83,7 @@ public class SearchAfterIT extends ESIntegTestCase { } catch (SearchPhaseExecutionException e) { assertTrue(e.shardFailures().length > 0); for (ShardSearchFailure failure : e.shardFailures()) { - assertThat(failure.getCause().getClass(), Matchers.equalTo(SearchContextException.class)); + assertThat(failure.getCause().getClass(), Matchers.equalTo(SearchException.class)); assertThat(failure.getCause().getMessage(), Matchers.equalTo("`from` parameter must be set to 0 when `search_after` is used.")); } diff --git a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java index 568c4c596b4e..8e7d95e69a79 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.SearchContextException; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.test.ESIntegTestCase; @@ -205,7 +205,7 @@ public class SearchSliceIT extends ESIntegTestCase { .slice(new SliceBuilder("invalid_random_int", 0, 10)) .get()); Throwable rootCause = findRootCause(exc); - assertThat(rootCause.getClass(), equalTo(SearchContextException.class)); + assertThat(rootCause.getClass(), equalTo(SearchException.class)); assertThat(rootCause.getMessage(), equalTo("`slice` cannot be used outside of a scroll context")); } diff --git a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index 8802b35a694d..f07f3fd98134 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -30,10 +30,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.Version; -import org.elasticsearch.action.IndicesRequest; +import 
org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchShardIterator; -import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -43,23 +42,17 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -84,115 +77,6 @@ import static org.mockito.Mockito.when; public class SliceBuilderTests extends ESTestCase { private static final int MAX_SLICE = 20; - static class ShardSearchRequestTest implements IndicesRequest, ShardSearchRequest { - private final String[] indices; - private final int shardId; - private final String[] indexRoutings; - private final String preference; - - ShardSearchRequestTest(String index, int shardId, String[] indexRoutings, String preference) { - this.indices = new String[] { index }; - this.shardId = shardId; - this.indexRoutings = indexRoutings; - this.preference = preference; - } - - @Override - public String[] indices() { - return indices; - } - - @Override - public IndicesOptions indicesOptions() { - return null; - } - - @Override - public ShardId shardId() { - return new ShardId(new Index(indices[0], indices[0]), shardId); - } - - @Override - public SearchSourceBuilder source() { - return null; - } - - @Override - public AliasFilter getAliasFilter() { - return null; - } - - @Override - public void setAliasFilter(AliasFilter filter) { - - } - - @Override - public void source(SearchSourceBuilder source) { - - } - - @Override - public int numberOfShards() { - return 0; - } - - @Override - public SearchType searchType() { - return null; - } - - @Override - public float indexBoost() { - return 0; - } - - @Override - public long nowInMillis() { - return 0; - } - - @Override - public Boolean requestCache() { - return null; - } - - @Override - public boolean allowPartialSearchResults() { - return true; - } - - @Override - public Scroll scroll() { - return null; - } - - @Override - public String[] indexRoutings() { - return indexRoutings; - } - - @Override - public String preference() { - return preference; - } - - @Override - public BytesReference cacheKey() { - return null; - } - - @Override - 
public String getClusterAlias() { - return null; - } - - @Override - public Rewriteable getRewriteable() { - return null; - } - } - private static SliceBuilder randomSliceBuilder() { int max = randomIntBetween(2, MAX_SLICE); int id = randomIntBetween(1, max - 1); @@ -228,7 +112,8 @@ public class SliceBuilderTests extends ESTestCase { } private ShardSearchRequest createRequest(int shardId, String[] routings, String preference) { - return new ShardSearchRequestTest("index", shardId, routings, preference); + return new ShardSearchRequest(OriginalIndices.NONE, new SearchRequest().preference(preference).allowPartialSearchResults(true), + new ShardId("index", "index", shardId), 1, null, 0f, System.currentTimeMillis(), null, routings); } private QueryShardContext createShardContext(Version indexVersionCreated, IndexReader reader, diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index 0a7c30101d3b..42fde64e3ed0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -1421,6 +1421,20 @@ public class FieldSortIT extends ESIntegTestCase { .endObject() .endObject() .endObject() + .startObject("bar") + .field("type", "nested") + .startObject("properties") + .startObject("foo") + .field("type", "text") + .field("fielddata", true) + .startObject("fields") + .startObject("sub") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() .endObject() .endObject() .endObject() @@ -1473,6 +1487,22 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(hits[0].getSortValues()[0], is("bar")); assertThat(hits[1].getSortValues()[0], is("abc")); + { + SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, + () -> client().prepareSearch() + .setQuery(matchAllQuery()) + .addSort(SortBuilders + .fieldSort("nested.bar.foo") + .setNestedSort(new NestedSortBuilder("nested") + .setNestedSort(new NestedSortBuilder("nested.bar") + .setMaxChildren(1))) + .order(SortOrder.DESC)) + .get() + ); + assertThat(exc.toString(), + containsString("max_children is only supported on top level of nested sort")); + } + // We sort on nested sub field searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) diff --git a/server/src/test/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/test/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 545968140f6f..1da355559716 100644 --- a/server/src/test/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/test/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.search.SearchContextException; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.test.ESIntegTestCase; @@ -50,7 +50,6 @@ public class MetadataFetchingIT extends ESIntegTestCase { .setFetchSource(false) .get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); 
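Because `SearchHit` no longer carries a per-hit `_type`, the `getType()` assertions in this file are deleted outright rather than rewritten. The metadata contract the test still verifies, sketched in isolation (the `BareHitAssertions` class and `assertBareHit` method are hypothetical):

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.nullValue;

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.search.SearchHit;

    final class BareHitAssertions {
        private BareHitAssertions() {}

        // With storedFields("_none_") a hit exposes neither an id nor a
        // source; after the removal of types these are the only per-hit
        // metadata assertions left to make.
        static void assertBareHit(SearchResponse response) {
            SearchHit hit = response.getHits().getAt(0);
            assertThat(hit.getId(), nullValue());
            assertThat(hit.getSourceAsString(), nullValue());
        }
    }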
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); response = client() @@ -58,7 +57,6 @@ public class MetadataFetchingIT extends ESIntegTestCase { .storedFields("_none_") .get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); } @@ -82,13 +80,11 @@ public class MetadataFetchingIT extends ESIntegTestCase { .get(); assertThat(response.getHits().getTotalHits().value, equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); assertThat(hits.getTotalHits().value, equalTo(1L)); assertThat(hits.getAt(0).getId(), nullValue()); - assertThat(hits.getAt(0).getType(), equalTo("_doc")); assertThat(hits.getAt(0).getSourceAsString(), nullValue()); } @@ -105,7 +101,6 @@ public class MetadataFetchingIT extends ESIntegTestCase { .setFetchSource(false) .get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); @@ -114,7 +109,6 @@ public class MetadataFetchingIT extends ESIntegTestCase { .storedFields("_none_") .get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getType(), equalTo("_doc")); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); } @@ -128,18 +122,18 @@ public class MetadataFetchingIT extends ESIntegTestCase { { SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("test").setFetchSource(true).storedFields("_none_").get()); - Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchContextException.class); + Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchException.class); assertNotNull(rootCause); - assertThat(rootCause.getClass(), equalTo(SearchContextException.class)); + assertThat(rootCause.getClass(), equalTo(SearchException.class)); assertThat(rootCause.getMessage(), equalTo("`stored_fields` cannot be disabled if _source is requested")); } { SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("test").storedFields("_none_").setVersion(true).get()); - Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchContextException.class); + Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchException.class); assertNotNull(rootCause); - assertThat(rootCause.getClass(), equalTo(SearchContextException.class)); + assertThat(rootCause.getClass(), equalTo(SearchException.class)); assertThat(rootCause.getMessage(), equalTo("`stored_fields` cannot be disabled if version is requested")); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/server/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 9766663d58b7..3576beaac54e 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/server/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java 
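The snapshot-test hunks that follow replace `awaitBusy` with `assertBusy` or `waitUntil`, matching the earlier change in `SearchWithRejectionsIT`. The practical difference, sketched as a self-contained test (the class name is hypothetical; `assertBusy` and `waitUntil` are the `ESTestCase` helpers this diff uses):

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicInteger;

    import org.elasticsearch.test.ESTestCase;

    public class BusyWaitStylesTests extends ESTestCase {
        public void testBothStyles() throws Exception {
            AtomicInteger attempts = new AtomicInteger();

            // assertBusy retries the block until it stops throwing or the
            // timeout expires; on timeout the last AssertionError is rethrown,
            // so the failure report shows the actual mismatch.
            assertBusy(() -> assertTrue(attempts.incrementAndGet() >= 3),
                1, TimeUnit.SECONDS);

            // waitUntil polls a boolean condition and returns the outcome,
            // leaving the failure message to an explicit assertTrue.
            boolean done = waitUntil(() -> attempts.get() >= 3,
                100, TimeUnit.MILLISECONDS);
            assertTrue("condition never became true", done);
        }
    }

`assertBusy` suits conditions best expressed as assertions, since the final failure carries the real mismatch; `waitUntil` suits plain booleans where the caller wants to own the failure handling, as `waitForBlockOnAnyDataNode` does below.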
@@ -208,17 +208,17 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase { } public void waitForBlockOnAnyDataNode(String repository, TimeValue timeout) throws InterruptedException { - if (false == awaitBusy(() -> { - for(RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { + final boolean blocked = waitUntil(() -> { + for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { MockRepository mockRepository = (MockRepository) repositoriesService.repository(repository); if (mockRepository.blocked()) { return true; } } return false; - }, timeout.millis(), TimeUnit.MILLISECONDS)) { - fail("Timeout waiting for repository block on any data node!!!"); - } + }, timeout.millis(), TimeUnit.MILLISECONDS); + + assertTrue("No repository is blocked waiting on a data node", blocked); } public static void unblockNode(final String repository, final String node) { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 47ec110a8a81..50b7fbcc6df0 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1222,7 +1222,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> wait for the index to appear"); // that would mean that recovery process started and failing - assertThat(waitForIndex("test-idx", TimeValue.timeValueSeconds(10)), equalTo(true)); + waitForIndex("test-idx", TimeValue.timeValueSeconds(10)); logger.info("--> delete index"); cluster().wipeIndices("test-idx"); @@ -2204,6 +2204,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertEquals("test-snap", response.getSnapshots().get(0).getSnapshot().getSnapshotId().getName()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/46276") public void testSnapshotRelocatingPrimary() throws Exception { Client client = client(); logger.info("--> creating repository"); @@ -2728,8 +2729,11 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas restoreFut.get(); } - private boolean waitForIndex(final String index, TimeValue timeout) throws InterruptedException { - return awaitBusy(() -> indexExists(index), timeout.millis(), TimeUnit.MILLISECONDS); + private void waitForIndex(final String index, TimeValue timeout) throws Exception { + assertBusy( + () -> assertTrue("Expected index [" + index + "] to exist", indexExists(index)), + timeout.millis(), + TimeUnit.MILLISECONDS); } public void testSnapshotName() throws Exception { @@ -3035,23 +3039,10 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setWaitForCompletion(true) .get() .getSnapshotInfo(); - assertThat(snapshotInfo2.state(), equalTo(SnapshotState.SUCCESS)); - assertThat(snapshotInfo2.failedShards(), equalTo(0)); - assertThat(snapshotInfo2.successfulShards(), equalTo(snapshotInfo.totalShards())); + assertThat(snapshotInfo2.state(), equalTo(SnapshotState.PARTIAL)); + assertThat(snapshotInfo2.failedShards(), equalTo(1)); + assertThat(snapshotInfo2.successfulShards(), equalTo(snapshotInfo.totalShards() - 1)); assertThat(snapshotInfo2.indices(), hasSize(1)); - - logger.info("--> deleting index [{}]", indexName); - 
assertAcked(client().admin().indices().prepareDelete(indexName)); - - logger.info("--> restoring snapshot [{}]", snapshot2); - client().admin().cluster().prepareRestoreSnapshot("test-repo", snapshot2) - .setRestoreGlobalState(randomBoolean()) - .setWaitForCompletion(true) - .get(); - - ensureGreen(); - - assertHitCount(client().prepareSearch(indexName).setSize(0).get(), 2 * nDocs); } public void testCannotCreateSnapshotsWithSameName() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index f88a5b7a484d..b6cb04271e7b 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -165,7 +165,6 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.snapshots.mockstore.MockEventuallyConsistentRepository; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.disruption.DisruptableMockTransport; -import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportInterceptor; @@ -240,7 +239,7 @@ public class SnapshotResiliencyTests extends ESTestCase { (BlobStoreRepository) testClusterNodes.randomMasterNodeSafe().repositoriesService.repository("repo"), Runnable::run); } finally { - testClusterNodes.nodes.values().forEach(TestClusterNode::stop); + testClusterNodes.nodes.values().forEach(TestClusterNodes.TestClusterNode::stop); } } @@ -253,7 +252,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final int shards = randomIntBetween(1, 10); final int documents = randomIntBetween(0, 100); - final TestClusterNode masterNode = + final TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final StepListener createSnapshotResponseListener = new StepListener<>(); @@ -326,7 +325,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final String index = "test"; final int shards = randomIntBetween(1, 10); - TestClusterNode masterNode = + TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final StepListener createSnapshotResponseStepListener = new StepListener<>(); @@ -363,7 +362,7 @@ public class SnapshotResiliencyTests extends ESTestCase { clearDisruptionsAndAwaitSync(); - final TestClusterNode randomMaster = testClusterNodes.randomMasterNode() + final TestClusterNodes.TestClusterNode randomMaster = testClusterNodes.randomMasterNode() .orElseThrow(() -> new AssertionError("expected to find at least one active master node")); SnapshotsInProgress finalSnapshotsInProgress = randomMaster.clusterService.state().custom(SnapshotsInProgress.TYPE); assertThat(finalSnapshotsInProgress.entries(), empty()); @@ -380,7 +379,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final String index = "test"; final int shards = randomIntBetween(1, 10); - TestClusterNode masterNode = + TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final StepListener createSnapshotResponseStepListener = new StepListener<>(); @@ -431,7 +430,7 @@ public class SnapshotResiliencyTests 
extends ESTestCase { final int shards = randomIntBetween(1, 10); - final TestClusterNode masterNode = + final TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final AtomicBoolean createdSnapshot = new AtomicBoolean(); final AdminClient masterAdminClient = masterNode.client.admin(); @@ -443,8 +442,8 @@ public class SnapshotResiliencyTests extends ESTestCase { continueOrDie(clusterStateResponseStepListener, clusterStateResponse -> { final ShardRouting shardToRelocate = clusterStateResponse.getState().routingTable().allShards(index).get(0); - final TestClusterNode currentPrimaryNode = testClusterNodes.nodeById(shardToRelocate.currentNodeId()); - final TestClusterNode otherNode = testClusterNodes.randomDataNodeSafe(currentPrimaryNode.node.getName()); + final TestClusterNodes.TestClusterNode currentPrimaryNode = testClusterNodes.nodeById(shardToRelocate.currentNodeId()); + final TestClusterNodes.TestClusterNode otherNode = testClusterNodes.randomDataNodeSafe(currentPrimaryNode.node.getName()); scheduleNow(() -> testClusterNodes.stopNode(currentPrimaryNode)); scheduleNow(new Runnable() { @Override @@ -504,7 +503,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final int shards = randomIntBetween(1, 10); final int documents = randomIntBetween(2, 100); - TestClusterNode masterNode = + TestClusterNodes.TestClusterNode masterNode = testClusterNodes.currentMaster(testClusterNodes.nodes.values().iterator().next().clusterService.state()); final StepListener createSnapshotResponseStepListener = new StepListener<>(); @@ -574,7 +573,8 @@ public class SnapshotResiliencyTests extends ESTestCase { assertEquals(0, snapshotInfo.failedShards()); } - private StepListener createRepoAndIndex(TestClusterNode masterNode, String repoName, String index, int shards) { + private StepListener createRepoAndIndex(TestClusterNodes.TestClusterNode masterNode, String repoName, + String index, int shards) { final AdminClient adminClient = masterNode.client.admin(); final StepListener createRepositoryListener = new StepListener<>(); @@ -604,7 +604,7 @@ public class SnapshotResiliencyTests extends ESTestCase { if (randomBoolean()) { disconnectRandomDataNode(); } else { - testClusterNodes.randomDataNode().ifPresent(TestClusterNode::restart); + testClusterNodes.randomDataNode().ifPresent(TestClusterNodes.TestClusterNode::restart); } } @@ -712,7 +712,10 @@ public class SnapshotResiliencyTests extends ESTestCase { // LinkedHashMap so we have deterministic ordering when iterating over the map in tests private final Map nodes = new LinkedHashMap<>(); - private final DisconnectedNodes disruptedLinks = new DisconnectedNodes(); + /** + * Node ids that are disconnected from all other nodes. 
+ */ + private final Set disconnectedNodes = new HashSet<>(); TestClusterNodes(int masterNodes, int dataNodes) { for (int i = 0; i < masterNodes; ++i) { @@ -751,7 +754,7 @@ public class SnapshotResiliencyTests extends ESTestCase { private TestClusterNode newNode(String nodeName, DiscoveryNodeRole role) throws IOException { return new TestClusterNode( new DiscoveryNode(nodeName, randomAlphaOfLength(10), buildNewFakeTransportAddress(), emptyMap(), - Collections.singleton(role), Version.CURRENT), this::getDisruption); + Collections.singleton(role), Version.CURRENT)); } public TestClusterNode randomMasterNodeSafe() { @@ -790,16 +793,16 @@ public class SnapshotResiliencyTests extends ESTestCase { } public void disconnectNode(TestClusterNode node) { - if (disruptedLinks.disconnected.contains(node.node.getName())) { + if (disconnectedNodes.contains(node.node.getId())) { return; } testClusterNodes.nodes.values().forEach(n -> n.transportService.getConnectionManager().disconnectFromNode(node.node)); - disruptedLinks.disconnect(node.node.getName()); + disconnectedNodes.add(node.node.getId()); } public void clearNetworkDisruptions() { - final Set disconnectedNodes = new HashSet<>(disruptedLinks.disconnected); - disruptedLinks.clear(); + final Set disconnectedNodes = new HashSet<>(this.disconnectedNodes); + this.disconnectedNodes.clear(); disconnectedNodes.forEach(nodeName -> { if (testClusterNodes.nodes.containsKey(nodeName)) { final DiscoveryNode node = testClusterNodes.nodes.get(nodeName).node; @@ -808,10 +811,6 @@ public class SnapshotResiliencyTests extends ESTestCase { }); } - private NetworkDisruption.DisruptedLinks getDisruption() { - return disruptedLinks; - } - /** * Builds a {@link DiscoveryNodes} instance that holds the nodes in this test cluster. * @return DiscoveryNodes @@ -833,392 +832,367 @@ public class SnapshotResiliencyTests extends ESTestCase { assertTrue(master.node.isMasterNode()); return master; } - } - private final class TestClusterNode { + private final class TestClusterNode { - private final Logger logger = LogManager.getLogger(TestClusterNode.class); + private final Logger logger = LogManager.getLogger(TestClusterNode.class); - private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Stream.concat( - ClusterModule.getNamedWriteables().stream(), NetworkModule.getNamedWriteables().stream()).collect(Collectors.toList())); + private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Stream.concat( + ClusterModule.getNamedWriteables().stream(), NetworkModule.getNamedWriteables().stream()).collect(Collectors.toList())); - private final TransportService transportService; + private final TransportService transportService; - private final ClusterService clusterService; + private final ClusterService clusterService; - private final RepositoriesService repositoriesService; + private final RepositoriesService repositoriesService; - private final SnapshotsService snapshotsService; + private final SnapshotsService snapshotsService; - private final SnapshotShardsService snapshotShardsService; + private final SnapshotShardsService snapshotShardsService; - private final IndicesService indicesService; + private final IndicesService indicesService; - private final IndicesClusterStateService indicesClusterStateService; + private final IndicesClusterStateService indicesClusterStateService; - private final DiscoveryNode node; + private final DiscoveryNode node; - private final MasterService masterService; + private final MasterService 
masterService; - private final AllocationService allocationService; + private final AllocationService allocationService; - private final NodeClient client; + private final NodeClient client; - private final NodeEnvironment nodeEnv; + private final NodeEnvironment nodeEnv; - private final DisruptableMockTransport mockTransport; + private final DisruptableMockTransport mockTransport; - private final ThreadPool threadPool; + private final ThreadPool threadPool; - private final Supplier disruption; + private Coordinator coordinator; - private Coordinator coordinator; + TestClusterNode(DiscoveryNode node) throws IOException { + this.node = node; + final Environment environment = createEnvironment(node.getName()); + masterService = new FakeThreadPoolMasterService(node.getName(), "test", deterministicTaskQueue::scheduleNow); + final Settings settings = environment.settings(); + final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + threadPool = deterministicTaskQueue.getThreadPool(); + clusterService = new ClusterService(settings, clusterSettings, masterService, + new ClusterApplierService(node.getName(), settings, clusterSettings, threadPool) { + @Override + protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { + return new MockSinglePrioritizingExecutor(node.getName(), deterministicTaskQueue); + } - TestClusterNode(DiscoveryNode node, Supplier disruption) throws IOException { - this.disruption = disruption; - this.node = node; - final Environment environment = createEnvironment(node.getName()); - masterService = new FakeThreadPoolMasterService(node.getName(), "test", deterministicTaskQueue::scheduleNow); - final Settings settings = environment.settings(); - final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - threadPool = deterministicTaskQueue.getThreadPool(); - clusterService = new ClusterService(settings, clusterSettings, masterService, - new ClusterApplierService(node.getName(), settings, clusterSettings, threadPool) { + @Override + protected void connectToNodesAndWait(ClusterState newClusterState) { + // don't do anything, and don't block + } + }); + mockTransport = new DisruptableMockTransport(node, logger) { @Override - protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { - return new MockSinglePrioritizingExecutor(node.getName(), deterministicTaskQueue); + protected ConnectionStatus getConnectionStatus(DiscoveryNode destination) { + if (node.equals(destination)) { + return ConnectionStatus.CONNECTED; + } + // Check if both nodes are still part of the cluster + if (nodes.containsKey(node.getName()) == false || nodes.containsKey(destination.getName()) == false) { + return ConnectionStatus.DISCONNECTED; + } + return disconnectedNodes.contains(node.getId()) || disconnectedNodes.contains(destination.getId()) + ? 
ConnectionStatus.DISCONNECTED : ConnectionStatus.CONNECTED; } @Override - protected void connectToNodesAndWait(ClusterState newClusterState) { - // don't do anything, and don't block + protected Optional getDisruptableMockTransport(TransportAddress address) { + return nodes.values().stream().map(cn -> cn.mockTransport) + .filter(transport -> transport.getLocalNode().getAddress().equals(address)) + .findAny(); + } + + @Override + protected void execute(Runnable runnable) { + scheduleNow(CoordinatorTests.onNodeLog(getLocalNode(), runnable)); + } + + @Override + protected NamedWriteableRegistry writeableRegistry() { + return namedWriteableRegistry; + } + }; + transportService = mockTransport.createTransportService( + settings, deterministicTaskQueue.getThreadPool(runnable -> CoordinatorTests.onNodeLog(node, runnable)), + new TransportInterceptor() { + @Override + public TransportRequestHandler interceptHandler(String action, String executor, + boolean forceExecution, TransportRequestHandler actualHandler) { + // TODO: Remove this hack once recoveries are async and can be used in these tests + if (action.startsWith("internal:index/shard/recovery")) { + return (request, channel, task) -> scheduleSoon( + new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + channel.sendResponse(new TransportException(new IOException("failed to recover shard"))); + } + + @Override + public void onFailure(final Exception e) { + throw new AssertionError(e); + } + }); + } else { + return actualHandler; + } + } + }, + a -> node, null, emptySet() + ); + final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); + repositoriesService = new RepositoriesService( + settings, clusterService, transportService, + Collections.singletonMap(FsRepository.TYPE, getRepoFactory(environment)), emptyMap(), threadPool + ); + snapshotsService = + new SnapshotsService(settings, clusterService, indexNameExpressionResolver, repositoriesService, threadPool); + nodeEnv = new NodeEnvironment(settings, environment); + final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(Collections.emptyList()); + final ScriptService scriptService = new ScriptService(settings, emptyMap(), emptyMap()); + client = new NodeClient(settings, threadPool); + allocationService = ESAllocationTestCase.createAllocationService(settings); + final IndexScopedSettings indexScopedSettings = + new IndexScopedSettings(settings, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + final BigArrays bigArrays = new BigArrays(new PageCacheRecycler(settings), null, "test"); + final MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); + indicesService = new IndicesService( + settings, + mock(PluginsService.class), + nodeEnv, + namedXContentRegistry, + new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), + emptyMap(), emptyMap(), emptyMap(), emptyMap()), + indexNameExpressionResolver, + mapperRegistry, + namedWriteableRegistry, + threadPool, + indexScopedSettings, + new NoneCircuitBreakerService(), + bigArrays, + scriptService, + clusterService, + client, + new MetaStateService(nodeEnv, namedXContentRegistry), + Collections.emptyList(), + emptyMap() + ); + final RecoverySettings recoverySettings = new RecoverySettings(settings, clusterSettings); + final ActionFilters actionFilters = new ActionFilters(emptySet()); + snapshotShardsService = new SnapshotShardsService( + settings, clusterService, repositoriesService, 
threadPool, + transportService, indicesService, actionFilters, indexNameExpressionResolver); + final ShardStateAction shardStateAction = new ShardStateAction( + clusterService, transportService, allocationService, + new BatchedRerouteService(clusterService, allocationService::reroute), + threadPool + ); + @SuppressWarnings("rawtypes") + Map actions = new HashMap<>(); + actions.put(GlobalCheckpointSyncAction.TYPE, + new GlobalCheckpointSyncAction(settings, transportService, clusterService, indicesService, + threadPool, shardStateAction, actionFilters, indexNameExpressionResolver)); + actions.put(RetentionLeaseBackgroundSyncAction.TYPE, + new RetentionLeaseBackgroundSyncAction(settings, transportService, clusterService, indicesService, threadPool, + shardStateAction, actionFilters, indexNameExpressionResolver)); + actions.put(RetentionLeaseSyncAction.TYPE, + new RetentionLeaseSyncAction(settings, transportService, clusterService, indicesService, threadPool, + shardStateAction, actionFilters, indexNameExpressionResolver)); + final MetaDataMappingService metaDataMappingService = new MetaDataMappingService(clusterService, indicesService); + indicesClusterStateService = new IndicesClusterStateService( + settings, + indicesService, + clusterService, + threadPool, + new PeerRecoveryTargetService(threadPool, transportService, recoverySettings, clusterService), + shardStateAction, + new NodeMappingRefreshAction(transportService, metaDataMappingService), + repositoriesService, + mock(SearchService.class), + new SyncedFlushService(indicesService, clusterService, transportService, indexNameExpressionResolver), + new PeerRecoverySourceService(transportService, indicesService, recoverySettings), + snapshotShardsService, + new PrimaryReplicaSyncer( + transportService, + new TransportResyncReplicationAction( + settings, + transportService, + clusterService, + indicesService, + threadPool, + shardStateAction, + actionFilters, + indexNameExpressionResolver)), + client); + final MetaDataCreateIndexService metaDataCreateIndexService = new MetaDataCreateIndexService(settings, clusterService, + indicesService, + allocationService, new AliasValidator(), environment, indexScopedSettings, + threadPool, namedXContentRegistry, false); + actions.put(CreateIndexAction.INSTANCE, + new TransportCreateIndexAction( + transportService, clusterService, threadPool, + metaDataCreateIndexService, + actionFilters, indexNameExpressionResolver + )); + final MappingUpdatedAction mappingUpdatedAction = new MappingUpdatedAction(settings, clusterSettings); + mappingUpdatedAction.setClient(client); + actions.put(BulkAction.INSTANCE, + new TransportBulkAction(threadPool, transportService, clusterService, + new IngestService( + clusterService, threadPool, environment, scriptService, + new AnalysisModule(environment, Collections.emptyList()).getAnalysisRegistry(), + Collections.emptyList(), client), + client, actionFilters, indexNameExpressionResolver, + new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver) + )); + final TransportShardBulkAction transportShardBulkAction = new TransportShardBulkAction(settings, transportService, + clusterService, indicesService, threadPool, shardStateAction, mappingUpdatedAction, new UpdateHelper(scriptService), + actionFilters, indexNameExpressionResolver); + actions.put(TransportShardBulkAction.TYPE, transportShardBulkAction); + final RestoreService restoreService = new RestoreService( + clusterService, repositoriesService, allocationService, + metaDataCreateIndexService, + new 
MetaDataIndexUpgradeService( + settings, namedXContentRegistry, + mapperRegistry, + indexScopedSettings, + Collections.emptyList() + ), + clusterSettings + ); + actions.put(PutMappingAction.INSTANCE, + new TransportPutMappingAction(transportService, clusterService, threadPool, metaDataMappingService, + actionFilters, indexNameExpressionResolver, new RequestValidators<>(Collections.emptyList()))); + final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); + final SearchTransportService searchTransportService = new SearchTransportService(transportService, + SearchExecutionStatsCollector.makeWrapper(responseCollectorService)); + final SearchService searchService = new SearchService(clusterService, indicesService, threadPool, scriptService, + bigArrays, new FetchPhase(Collections.emptyList()), responseCollectorService); + actions.put(SearchAction.INSTANCE, + new TransportSearchAction(threadPool, transportService, searchService, + searchTransportService, new SearchPhaseController(searchService::createReduceContext), clusterService, + actionFilters, indexNameExpressionResolver)); + actions.put(RestoreSnapshotAction.INSTANCE, + new TransportRestoreSnapshotAction(transportService, clusterService, threadPool, restoreService, actionFilters, + indexNameExpressionResolver)); + actions.put(DeleteIndexAction.INSTANCE, + new TransportDeleteIndexAction( + transportService, clusterService, threadPool, + new MetaDataDeleteIndexService(settings, clusterService, allocationService), actionFilters, + indexNameExpressionResolver, new DestructiveOperations(settings, clusterSettings))); + actions.put(PutRepositoryAction.INSTANCE, + new TransportPutRepositoryAction( + transportService, clusterService, repositoriesService, threadPool, + actionFilters, indexNameExpressionResolver + )); + actions.put(CreateSnapshotAction.INSTANCE, + new TransportCreateSnapshotAction( + transportService, clusterService, threadPool, + snapshotsService, actionFilters, indexNameExpressionResolver + )); + actions.put(ClusterRerouteAction.INSTANCE, + new TransportClusterRerouteAction(transportService, clusterService, threadPool, allocationService, + actionFilters, indexNameExpressionResolver)); + actions.put(ClusterStateAction.INSTANCE, + new TransportClusterStateAction(transportService, clusterService, threadPool, + actionFilters, indexNameExpressionResolver)); + actions.put(IndicesShardStoresAction.INSTANCE, + new TransportIndicesShardStoresAction( + transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, + client)); + actions.put(TransportNodesListGatewayStartedShards.TYPE, new TransportNodesListGatewayStartedShards(settings, + threadPool, clusterService, transportService, actionFilters, nodeEnv, indicesService, namedXContentRegistry)); + actions.put(DeleteSnapshotAction.INSTANCE, + new TransportDeleteSnapshotAction( + transportService, clusterService, threadPool, + snapshotsService, actionFilters, indexNameExpressionResolver + )); + client.initialize(actions, transportService.getTaskManager(), + () -> clusterService.localNode().getId(), transportService.getRemoteClusterService()); + } + + private Repository.Factory getRepoFactory(Environment environment) { + // Run half the tests with the eventually consistent repository + if (blobStoreContext == null) { + return metaData -> { + final Repository repository = new FsRepository(metaData, environment, xContentRegistry(), threadPool) { + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate 
thread name check as we create repo in the test thread + } + }; + repository.start(); + return repository; + }; + } else { + return metaData -> { + final Repository repository = new MockEventuallyConsistentRepository( + metaData, xContentRegistry(), deterministicTaskQueue.getThreadPool(), blobStoreContext); + repository.start(); + return repository; + }; + } + } + public void restart() { + testClusterNodes.disconnectNode(this); + final ClusterState oldState = this.clusterService.state(); + stop(); + nodes.remove(node.getName()); + scheduleSoon(() -> { + try { + final TestClusterNode restartedNode = new TestClusterNode( + new DiscoveryNode(node.getName(), node.getId(), node.getAddress(), emptyMap(), + node.getRoles(), Version.CURRENT)); + nodes.put(node.getName(), restartedNode); + restartedNode.start(oldState); + } catch (IOException e) { + throw new AssertionError(e); } }); - mockTransport = new DisruptableMockTransport(node, logger) { - @Override - protected ConnectionStatus getConnectionStatus(DiscoveryNode destination) { - return disruption.get().disrupt(node.getName(), destination.getName()) - ? ConnectionStatus.DISCONNECTED : ConnectionStatus.CONNECTED; - } - - @Override - protected Optional getDisruptableMockTransport(TransportAddress address) { - return testClusterNodes.nodes.values().stream().map(cn -> cn.mockTransport) - .filter(transport -> transport.getLocalNode().getAddress().equals(address)) - .findAny(); - } - - @Override - protected void execute(Runnable runnable) { - scheduleNow(CoordinatorTests.onNodeLog(getLocalNode(), runnable)); - } - - @Override - protected NamedWriteableRegistry writeableRegistry() { - return namedWriteableRegistry; - } - }; - transportService = mockTransport.createTransportService( - settings, deterministicTaskQueue.getThreadPool(runnable -> CoordinatorTests.onNodeLog(node, runnable)), - new TransportInterceptor() { - @Override - public TransportRequestHandler interceptHandler(String action, String executor, - boolean forceExecution, TransportRequestHandler actualHandler) { - // TODO: Remove this hack once recoveries are async and can be used in these tests - if (action.startsWith("internal:index/shard/recovery")) { - return (request, channel, task) -> scheduleSoon( - new AbstractRunnable() { - @Override - protected void doRun() throws Exception { - channel.sendResponse(new TransportException(new IOException("failed to recover shard"))); - } - - @Override - public void onFailure(final Exception e) { - throw new AssertionError(e); - } - }); - } else { - return actualHandler; - } - } - }, - a -> node, null, emptySet() - ); - final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); - repositoriesService = new RepositoriesService( - settings, clusterService, transportService, - Collections.singletonMap(FsRepository.TYPE, getRepoFactory(environment)), emptyMap(), threadPool - ); - snapshotsService = - new SnapshotsService(settings, clusterService, indexNameExpressionResolver, repositoriesService, threadPool); - nodeEnv = new NodeEnvironment(settings, environment); - final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(Collections.emptyList()); - final ScriptService scriptService = new ScriptService(settings, emptyMap(), emptyMap()); - client = new NodeClient(settings, threadPool); - allocationService = ESAllocationTestCase.createAllocationService(settings); - final IndexScopedSettings indexScopedSettings = - new IndexScopedSettings(settings, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); - 
final BigArrays bigArrays = new BigArrays(new PageCacheRecycler(settings), null, "test"); - final MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); - indicesService = new IndicesService( - settings, - mock(PluginsService.class), - nodeEnv, - namedXContentRegistry, - new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), - emptyMap(), emptyMap(), emptyMap(), emptyMap()), - indexNameExpressionResolver, - mapperRegistry, - namedWriteableRegistry, - threadPool, - indexScopedSettings, - new NoneCircuitBreakerService(), - bigArrays, - scriptService, - clusterService, - client, - new MetaStateService(nodeEnv, namedXContentRegistry), - Collections.emptyList(), - emptyMap() - ); - final RecoverySettings recoverySettings = new RecoverySettings(settings, clusterSettings); - final ActionFilters actionFilters = new ActionFilters(emptySet()); - snapshotShardsService = new SnapshotShardsService( - settings, clusterService, repositoriesService, threadPool, - transportService, indicesService, actionFilters, indexNameExpressionResolver); - final ShardStateAction shardStateAction = new ShardStateAction( - clusterService, transportService, allocationService, - new BatchedRerouteService(clusterService, allocationService::reroute), - threadPool - ); - @SuppressWarnings("rawtypes") - Map actions = new HashMap<>(); - actions.put(GlobalCheckpointSyncAction.TYPE, - new GlobalCheckpointSyncAction(settings, transportService, clusterService, indicesService, - threadPool, shardStateAction, actionFilters, indexNameExpressionResolver)); - actions.put(RetentionLeaseBackgroundSyncAction.TYPE, - new RetentionLeaseBackgroundSyncAction(settings, transportService, clusterService, indicesService, threadPool, - shardStateAction, actionFilters, indexNameExpressionResolver)); - actions.put(RetentionLeaseSyncAction.TYPE, - new RetentionLeaseSyncAction(settings, transportService, clusterService, indicesService, threadPool, - shardStateAction, actionFilters, indexNameExpressionResolver)); - final MetaDataMappingService metaDataMappingService = new MetaDataMappingService(clusterService, indicesService); - indicesClusterStateService = new IndicesClusterStateService( - settings, - indicesService, - clusterService, - threadPool, - new PeerRecoveryTargetService(threadPool, transportService, recoverySettings, clusterService), - shardStateAction, - new NodeMappingRefreshAction(transportService, metaDataMappingService), - repositoriesService, - mock(SearchService.class), - new SyncedFlushService(indicesService, clusterService, transportService, indexNameExpressionResolver), - new PeerRecoverySourceService(transportService, indicesService, recoverySettings), - snapshotShardsService, - new PrimaryReplicaSyncer( - transportService, - new TransportResyncReplicationAction( - settings, - transportService, - clusterService, - indicesService, - threadPool, - shardStateAction, - actionFilters, - indexNameExpressionResolver)), - client); - final MetaDataCreateIndexService metaDataCreateIndexService = new MetaDataCreateIndexService(settings, clusterService, - indicesService, - allocationService, new AliasValidator(), environment, indexScopedSettings, - threadPool, namedXContentRegistry, false); - actions.put(CreateIndexAction.INSTANCE, - new TransportCreateIndexAction( - transportService, clusterService, threadPool, - metaDataCreateIndexService, - actionFilters, indexNameExpressionResolver - )); - final MappingUpdatedAction mappingUpdatedAction = new 
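/* Hedged note on the wiring below: MappingUpdatedAction is given the node client
   (setClient) so that dynamic mapping updates triggered by indexing are sent to
   the master, and it is then handed to TransportShardBulkAction, which waits for
   the updated mapping before completing the affected bulk item. */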
MappingUpdatedAction(settings, clusterSettings); - mappingUpdatedAction.setClient(client); - actions.put(BulkAction.INSTANCE, - new TransportBulkAction(threadPool, transportService, clusterService, - new IngestService( - clusterService, threadPool, environment, scriptService, - new AnalysisModule(environment, Collections.emptyList()).getAnalysisRegistry(), - Collections.emptyList(), client), - client, actionFilters, indexNameExpressionResolver, - new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver) - )); - final TransportShardBulkAction transportShardBulkAction = new TransportShardBulkAction(settings, transportService, - clusterService, indicesService, threadPool, shardStateAction, mappingUpdatedAction, new UpdateHelper(scriptService), - actionFilters, indexNameExpressionResolver); - actions.put(TransportShardBulkAction.TYPE, transportShardBulkAction); - final RestoreService restoreService = new RestoreService( - clusterService, repositoriesService, allocationService, - metaDataCreateIndexService, - new MetaDataIndexUpgradeService( - settings, namedXContentRegistry, - mapperRegistry, - indexScopedSettings, - Collections.emptyList() - ), - clusterSettings - ); - actions.put(PutMappingAction.INSTANCE, - new TransportPutMappingAction(transportService, clusterService, threadPool, metaDataMappingService, - actionFilters, indexNameExpressionResolver, new RequestValidators<>(Collections.emptyList()))); - final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); - final SearchTransportService searchTransportService = new SearchTransportService(transportService, - SearchExecutionStatsCollector.makeWrapper(responseCollectorService)); - final SearchService searchService = new SearchService(clusterService, indicesService, threadPool, scriptService, - bigArrays, new FetchPhase(Collections.emptyList()), responseCollectorService); - actions.put(SearchAction.INSTANCE, - new TransportSearchAction(threadPool, transportService, searchService, - searchTransportService, new SearchPhaseController(searchService::createReduceContext), clusterService, - actionFilters, indexNameExpressionResolver)); - actions.put(RestoreSnapshotAction.INSTANCE, - new TransportRestoreSnapshotAction(transportService, clusterService, threadPool, restoreService, actionFilters, - indexNameExpressionResolver)); - actions.put(DeleteIndexAction.INSTANCE, - new TransportDeleteIndexAction( - transportService, clusterService, threadPool, - new MetaDataDeleteIndexService(settings, clusterService, allocationService), actionFilters, - indexNameExpressionResolver, new DestructiveOperations(settings, clusterSettings))); - actions.put(PutRepositoryAction.INSTANCE, - new TransportPutRepositoryAction( - transportService, clusterService, repositoriesService, threadPool, - actionFilters, indexNameExpressionResolver - )); - actions.put(CreateSnapshotAction.INSTANCE, - new TransportCreateSnapshotAction( - transportService, clusterService, threadPool, - snapshotsService, actionFilters, indexNameExpressionResolver - )); - actions.put(ClusterRerouteAction.INSTANCE, - new TransportClusterRerouteAction(transportService, clusterService, threadPool, allocationService, - actionFilters, indexNameExpressionResolver)); - actions.put(ClusterStateAction.INSTANCE, - new TransportClusterStateAction(transportService, clusterService, threadPool, - actionFilters, indexNameExpressionResolver)); - actions.put(IndicesShardStoresAction.INSTANCE, - new TransportIndicesShardStoresAction( - transportService, 
clusterService, threadPool, actionFilters, indexNameExpressionResolver, - client)); - actions.put(TransportNodesListGatewayStartedShards.TYPE, new TransportNodesListGatewayStartedShards(settings, - threadPool, clusterService, transportService, actionFilters, nodeEnv, indicesService, namedXContentRegistry)); - actions.put(DeleteSnapshotAction.INSTANCE, - new TransportDeleteSnapshotAction( - transportService, clusterService, threadPool, - snapshotsService, actionFilters, indexNameExpressionResolver - )); - client.initialize(actions, transportService.getTaskManager(), - () -> clusterService.localNode().getId(), transportService.getRemoteClusterService()); - } - - private Repository.Factory getRepoFactory(Environment environment) { - // Run half the tests with the eventually consistent repository - if (blobStoreContext == null) { - return metaData -> { - final Repository repository = new FsRepository(metaData, environment, xContentRegistry(), threadPool) { - @Override - protected void assertSnapshotOrGenericThread() { - // eliminate thread name check as we create repo in the test thread - } - }; - repository.start(); - return repository; - }; - } else { - return metaData -> { - final Repository repository = new MockEventuallyConsistentRepository( - metaData, xContentRegistry(), deterministicTaskQueue.getThreadPool(), blobStoreContext); - repository.start(); - return repository; - }; } - } - public void restart() { - testClusterNodes.disconnectNode(this); - final ClusterState oldState = this.clusterService.state(); - stop(); - testClusterNodes.nodes.remove(node.getName()); - scheduleSoon(() -> { - try { - final TestClusterNode restartedNode = new TestClusterNode( - new DiscoveryNode(node.getName(), node.getId(), node.getAddress(), emptyMap(), - node.getRoles(), Version.CURRENT), disruption); - testClusterNodes.nodes.put(node.getName(), restartedNode); - restartedNode.start(oldState); - } catch (IOException e) { - throw new AssertionError(e); + + public void stop() { + testClusterNodes.disconnectNode(this); + indicesService.close(); + clusterService.close(); + indicesClusterStateService.close(); + if (coordinator != null) { + coordinator.close(); } - }); - } - - public void stop() { - testClusterNodes.disconnectNode(this); - indicesService.close(); - clusterService.close(); - indicesClusterStateService.close(); - if (coordinator != null) { - coordinator.close(); + nodeEnv.close(); } - nodeEnv.close(); - } - public void start(ClusterState initialState) { - transportService.start(); - transportService.acceptIncomingRequests(); - snapshotsService.start(); - snapshotShardsService.start(); - final CoordinationState.PersistedState persistedState = - new InMemoryPersistedState(initialState.term(), stateForNode(initialState, node)); - coordinator = new Coordinator(node.getName(), clusterService.getSettings(), - clusterService.getClusterSettings(), transportService, namedWriteableRegistry, - allocationService, masterService, () -> persistedState, - hostsResolver -> testClusterNodes.nodes.values().stream().filter(n -> n.node.isMasterNode()) - .map(n -> n.node.getAddress()).collect(Collectors.toList()), - clusterService.getClusterApplierService(), Collections.emptyList(), random(), - new BatchedRerouteService(clusterService, allocationService::reroute), ElectionStrategy.DEFAULT_INSTANCE); - masterService.setClusterStatePublisher(coordinator); - coordinator.start(); - masterService.start(); - clusterService.getClusterApplierService().setNodeConnectionsService( - new 
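/* NodeConnectionsService maintains the transport connections to the nodes of the
   applied cluster state; the applier service needs it installed before the first
   state is applied, which is why it is constructed here during node start-up. */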
NodeConnectionsService(clusterService.getSettings(), threadPool, transportService)); - clusterService.getClusterApplierService().start(); - indicesService.start(); - indicesClusterStateService.start(); - coordinator.startInitialJoin(); - } - } - - private final class DisconnectedNodes extends NetworkDisruption.DisruptedLinks { - - /** - * Node names that are disconnected from all other nodes. - */ - private final Set disconnected = new HashSet<>(); - - @Override - public boolean disrupt(String node1, String node2) { - if (node1.equals(node2)) { - return false; + public void start(ClusterState initialState) { + transportService.start(); + transportService.acceptIncomingRequests(); + snapshotsService.start(); + snapshotShardsService.start(); + final CoordinationState.PersistedState persistedState = + new InMemoryPersistedState(initialState.term(), stateForNode(initialState, node)); + coordinator = new Coordinator(node.getName(), clusterService.getSettings(), + clusterService.getClusterSettings(), transportService, namedWriteableRegistry, + allocationService, masterService, () -> persistedState, + hostsResolver -> nodes.values().stream().filter(n -> n.node.isMasterNode()) + .map(n -> n.node.getAddress()).collect(Collectors.toList()), + clusterService.getClusterApplierService(), Collections.emptyList(), random(), + new BatchedRerouteService(clusterService, allocationService::reroute), ElectionStrategy.DEFAULT_INSTANCE); + masterService.setClusterStatePublisher(coordinator); + coordinator.start(); + masterService.start(); + clusterService.getClusterApplierService().setNodeConnectionsService( + new NodeConnectionsService(clusterService.getSettings(), threadPool, transportService)); + clusterService.getClusterApplierService().start(); + indicesService.start(); + indicesClusterStateService.start(); + coordinator.startInitialJoin(); } - // Check if both nodes are still part of the cluster - if (testClusterNodes.nodes.containsKey(node1) == false - || testClusterNodes.nodes.containsKey(node2) == false) { - return true; - } - return disconnected.contains(node1) || disconnected.contains(node2); - } - - public void disconnect(String node) { - disconnected.add(node); - } - - public void clear() { - disconnected.clear(); } } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java index 46404af9f403..5d927249804b 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java @@ -68,7 +68,7 @@ public class SnapshotsInProgressSerializationTests extends AbstractDiffableWireS String nodeId = randomAlphaOfLength(10); ShardState shardState = randomFrom(ShardState.values()); builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(nodeId, shardState, - shardState.failed() ? randomAlphaOfLength(10) : null)); + shardState.failed() ? 
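/* The trailing "1" is the new shard generation argument on ShardSnapshotStatus:
   shard snapshots now report which repository generation they wrote, matching the
   snapshotShard(...) change further down in IndexShardTestCase that returns the
   new generation. This is a reading of the patch, not an authoritative API note. */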
randomAlphaOfLength(10) : null, "1")); } ImmutableOpenMap shards = builder.build(); return new Entry(snapshot, includeGlobalState, partial, state, indices, startTime, repositoryStateId, shards, diff --git a/server/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java b/server/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java index 785552124ea2..eb52b6334a7f 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java @@ -178,7 +178,7 @@ public class ScheduleWithFixedDelayTests extends ESTestCase { // rarely wait and make sure the runnable didn't run at the next interval if (rarely()) { - assertFalse(awaitBusy(runAfterDone::get, 1L, TimeUnit.SECONDS)); + assertBusy(() -> assertFalse("Runnable was run after being cancelled", runAfterDone.get()), 1L, TimeUnit.SECONDS); } } @@ -283,10 +283,10 @@ public class ScheduleWithFixedDelayTests extends ESTestCase { assertThat(counterValue, equalTo(iterations)); if (rarely()) { - awaitBusy(() -> { - final int value = counter.get(); - return value == iterations; - }, 5 * interval.millis(), TimeUnit.MILLISECONDS); + assertBusy( + () -> assertThat(counter.get(), equalTo(iterations)), + 5 * interval.millis(), + TimeUnit.MILLISECONDS); } } diff --git a/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java index 4677567ab5a2..f34d39ae7252 100644 --- a/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ConnectionManagerTests.java @@ -77,12 +77,12 @@ public class ConnectionManagerTests extends ESTestCase { AtomicInteger nodeDisconnectedCount = new AtomicInteger(); connectionManager.addListener(new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { nodeConnectedCount.incrementAndGet(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { nodeDisconnectedCount.incrementAndGet(); } }); @@ -151,10 +151,11 @@ public class ConnectionManagerTests extends ESTestCase { } }; - CyclicBarrier barrier = new CyclicBarrier(11); List threads = new ArrayList<>(); AtomicInteger nodeConnectedCount = new AtomicInteger(); AtomicInteger nodeFailureCount = new AtomicInteger(); + + CyclicBarrier barrier = new CyclicBarrier(11); for (int i = 0; i < 10; i++) { Thread thread = new Thread(() -> { try { @@ -166,6 +167,9 @@ public class ConnectionManagerTests extends ESTestCase { connectionManager.connectToNode(node, connectionProfile, validator, ActionListener.wrap(c -> { nodeConnectedCount.incrementAndGet(); + if (connectionManager.nodeConnected(node) == false) { + throw new AssertionError("Expected node to be connected"); + } assert latch.getCount() == 1; latch.countDown(); }, e -> { @@ -200,12 +204,12 @@ public class ConnectionManagerTests extends ESTestCase { AtomicInteger nodeDisconnectedCount = new AtomicInteger(); connectionManager.addListener(new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { nodeConnectedCount.incrementAndGet(); } @Override - public void 
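/* TransportConnectionListener callbacks now receive the affected
   Transport.Connection as a second argument. Listeners that only count events,
   like the ones in this test, can simply ignore it:

       public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) {
           nodeDisconnectedCount.incrementAndGet();
       }

   This sketch mirrors the replacement lines of the surrounding hunks. */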
onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { nodeDisconnectedCount.incrementAndGet(); } }); @@ -240,12 +244,12 @@ public class ConnectionManagerTests extends ESTestCase { AtomicInteger nodeDisconnectedCount = new AtomicInteger(); connectionManager.addListener(new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { nodeConnectedCount.incrementAndGet(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { nodeDisconnectedCount.incrementAndGet(); } }); @@ -289,7 +293,6 @@ public class ConnectionManagerTests extends ESTestCase { @Override public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) throws TransportException { - } } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java index 3d3475121606..2d0097b43a71 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java @@ -87,7 +87,7 @@ public class RemoteClusterClientTests extends ESTestCase { service.getRemoteClusterService().getConnections().forEach(con -> { con.getConnectionManager().addListener(new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { if (remoteNode.equals(node)) { semaphore.release(); } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 2d0e1bb1aa2c..92eb60cfdaf6 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -101,6 +101,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; import static org.hamcrest.Matchers.startsWith; +import static org.mockito.Mockito.mock; public class RemoteClusterConnectionTests extends ESTestCase { @@ -344,14 +345,14 @@ public class RemoteClusterConnectionTests extends ESTestCase { CountDownLatch latchConnected = new CountDownLatch(1); connectionManager.addListener(new TransportConnectionListener() { @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { if (node.equals(discoverableNode)) { latchDisconnect.countDown(); } } @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { if (node.equals(spareNode)) { latchConnected.countDown(); } @@ -480,7 +481,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { ConnectionManager delegate = new ConnectionManager(Settings.EMPTY, service.transport); StubbableConnectionManager connectionManager = new StubbableConnectionManager(delegate, Settings.EMPTY, service.transport); - connectionManager.addConnectBehavior(seedNode.getAddress(), (cm, discoveryNode) -> { + 
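/* StubbableConnectionManager's hook is renamed and re-scoped in this patch:
   addConnectBehavior stubbed how a connection is opened, whereas
   addGetConnectionBehavior stubs what getConnection(...) returns, which is all
   this test needs since it only hands out a canned seedConnection. */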
connectionManager.addGetConnectionBehavior(seedNode.getAddress(), (cm, discoveryNode) -> { if (discoveryNode == seedNode) { return seedConnection; } @@ -958,8 +959,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { barrier.await(); for (int j = 0; j < numGetCalls; j++) { try { - DiscoveryNode node = connection.getAnyConnectedNode(); - assertNotNull(node); + Transport.Connection lowLevelConnection = connection.getConnection(); + assertNotNull(lowLevelConnection); } catch (NoSuchRemoteClusterException e) { // ignore, this is an expected exception } @@ -989,7 +990,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { ActionListener.map(fut, x -> null))); } else { DiscoveryNode node = randomFrom(discoverableNodes).v2().get(); - connection.onNodeDisconnected(node); + connection.onNodeDisconnected(node, mock(Transport.Connection.class)); } } } catch (Exception ex) { @@ -1097,14 +1098,14 @@ public class RemoteClusterConnectionTests extends ESTestCase { ConnectionManager delegate = new ConnectionManager(Settings.EMPTY, service.transport); StubbableConnectionManager connectionManager = new StubbableConnectionManager(delegate, Settings.EMPTY, service.transport); - connectionManager.setDefaultNodeConnectedBehavior(cm -> Collections.singleton(connectedNode)); + connectionManager.setDefaultNodeConnectedBehavior((cm, node) -> connectedNode.equals(node)); - connectionManager.addConnectBehavior(connectedNode.getAddress(), (cm, discoveryNode) -> { - if (discoveryNode == connectedNode) { - return seedConnection; - } - return cm.getConnection(discoveryNode); + connectionManager.addGetConnectionBehavior(connectedNode.getAddress(), (cm, discoveryNode) -> seedConnection); + + connectionManager.addGetConnectionBehavior(disconnectedNode.getAddress(), (cm, discoveryNode) -> { + throw new NodeNotConnectedException(discoveryNode, ""); }); + service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", @@ -1118,13 +1119,13 @@ public class RemoteClusterConnectionTests extends ESTestCase { for (int i = 0; i < 10; i++) { // we don't use the transport service connection manager so we will get a proxy connection for the local node Transport.Connection remoteConnection = connection.getConnection(service.getLocalNode()); - assertThat(remoteConnection, instanceOf(RemoteClusterConnection.ProxyConnection.class)); + assertThat(remoteConnection, instanceOf(RemoteConnectionManager.ProxyConnection.class)); assertThat(remoteConnection.getNode(), equalTo(service.getLocalNode())); } for (int i = 0; i < 10; i++) { //always a proxy connection as the target node is not connected Transport.Connection remoteConnection = connection.getConnection(disconnectedNode); - assertThat(remoteConnection, instanceOf(RemoteClusterConnection.ProxyConnection.class)); + assertThat(remoteConnection, instanceOf(RemoteConnectionManager.ProxyConnection.class)); assertThat(remoteConnection.getNode(), sameInstance(disconnectedNode)); } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java new file mode 100644 index 000000000000..e2d33a6263a4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.test.ESTestCase; + +import java.net.InetAddress; +import java.util.HashSet; +import java.util.Set; + +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +public class RemoteConnectionManagerTests extends ESTestCase { + + private Transport transport; + private RemoteConnectionManager remoteConnectionManager; + private ConnectionManager.ConnectionValidator validator = (connection, profile, listener) -> listener.onResponse(null); + + @Override + public void setUp() throws Exception { + super.setUp(); + transport = mock(Transport.class); + remoteConnectionManager = new RemoteConnectionManager("remote-cluster", new ConnectionManager(Settings.EMPTY, transport)); + } + + @SuppressWarnings("unchecked") + public void testGetConnection() { + TransportAddress address = new TransportAddress(InetAddress.getLoopbackAddress(), 1000); + + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(new TestRemoteConnection((DiscoveryNode) invocationOnMock.getArguments()[0])); + return null; + }).when(transport).openConnection(any(DiscoveryNode.class), any(ConnectionProfile.class), any(ActionListener.class)); + + DiscoveryNode node1 = new DiscoveryNode("node-1", address, Version.CURRENT); + PlainActionFuture future1 = PlainActionFuture.newFuture(); + remoteConnectionManager.connectToNode(node1, null, validator, future1); + assertTrue(future1.isDone()); + + // Add duplicate connect attempt to ensure that we do not get duplicate connections in the round robin + remoteConnectionManager.connectToNode(node1, null, validator, PlainActionFuture.newFuture()); + + DiscoveryNode node2 = new DiscoveryNode("node-2", address, Version.CURRENT.minimumCompatibilityVersion()); + PlainActionFuture future2 = PlainActionFuture.newFuture(); + remoteConnectionManager.connectToNode(node2, null, validator, future2); + assertTrue(future2.isDone()); + + assertEquals(node1, remoteConnectionManager.getRemoteConnection(node1).getNode()); + assertEquals(node2, remoteConnectionManager.getRemoteConnection(node2).getNode()); + + DiscoveryNode node4 = new DiscoveryNode("node-4", address, Version.CURRENT); + assertThat(remoteConnectionManager.getRemoteConnection(node4), 
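/* node4 was never connected, so the manager is expected to fall back to a
   ProxyConnection: an existing connection to some connected node, wrapped so that
   requests are re-targeted at node4. The version assertions below exploit the
   fact that node-1 and node-2 run different versions to show that the fallback
   round-robins over both underlying connections. */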
instanceOf(RemoteConnectionManager.ProxyConnection.class)); + + // Test round robin + Set versions = new HashSet<>(); + versions.add(remoteConnectionManager.getRemoteConnection(node4).getVersion()); + versions.add(remoteConnectionManager.getRemoteConnection(node4).getVersion()); + + assertThat(versions, hasItems(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion())); + + // Test that the connection is cleared from the round robin list when it is closed + remoteConnectionManager.getRemoteConnection(node1).close(); + + versions.clear(); + versions.add(remoteConnectionManager.getRemoteConnection(node4).getVersion()); + versions.add(remoteConnectionManager.getRemoteConnection(node4).getVersion()); + + assertThat(versions, hasItems(Version.CURRENT.minimumCompatibilityVersion())); + assertEquals(1, versions.size()); + } + + private static class TestRemoteConnection extends CloseableConnection { + + private final DiscoveryNode node; + + private TestRemoteConnection(DiscoveryNode node) { + this.node = node; + } + + @Override + public DiscoveryNode getNode() { + return node; + } + + @Override + public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) + throws TransportException { + } + } +} diff --git a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index c170ca76c49d..eaab4b3f44cc 100644 --- a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -270,7 +270,6 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { private static void assertExplanation(QueryBuilder queryBuilder, Matcher matcher, boolean withRewrite) { ValidateQueryResponse response = client().admin().indices().prepareValidateQuery("test") - .setTypes("type1") .setQuery(queryBuilder) .setExplain(true) .setRewrite(withRewrite) @@ -285,7 +284,6 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { List> matchers, boolean withRewrite, boolean allShards) { ValidateQueryResponse response = client().admin().indices().prepareValidateQuery("test") - .setTypes("type1") .setQuery(queryBuilder) .setExplain(true) .setRewrite(withRewrite) @@ -307,9 +305,8 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { .setSource("followers", new int[] {1, 2, 3}).get(); refresh(); - TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "_doc", "1", "followers")); + TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "1", "followers")); ValidateQueryResponse response = client().admin().indices().prepareValidateQuery("twitter") - .setTypes("_doc") .setQuery(termsLookupQuery) .setExplain(true) .execute().actionGet(); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 595328b309c0..793e5b5f2f90 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -703,9 +703,8 @@ public class AbstractCoordinatorTestCase extends ESTestCase { if (rarely()) { nodeEnvironment = newNodeEnvironment(); nodeEnvironments.add(nodeEnvironment); - final 
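/* MockGatewayMetaState now takes only the local node at construction time;
   settings, the node environment and the xContent registry move into start(),
   mirroring the reworked GatewayMetaState lifecycle (see the MockGatewayMetaState
   hunk later in this patch). */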
MockGatewayMetaState gatewayMetaState - = new MockGatewayMetaState(Settings.EMPTY, nodeEnvironment, xContentRegistry(), localNode); - gatewayMetaState.start(); + final MockGatewayMetaState gatewayMetaState = new MockGatewayMetaState(localNode); + gatewayMetaState.start(Settings.EMPTY, nodeEnvironment, xContentRegistry()); delegate = gatewayMetaState.getPersistedState(); } else { nodeEnvironment = null; @@ -736,9 +735,8 @@ public class AbstractCoordinatorTestCase extends ESTestCase { new Manifest(updatedTerm, manifest.getClusterStateVersion(), manifest.getGlobalGeneration(), manifest.getIndexGenerations())); } - final MockGatewayMetaState gatewayMetaState - = new MockGatewayMetaState(Settings.EMPTY, nodeEnvironment, xContentRegistry(), newLocalNode); - gatewayMetaState.start(); + final MockGatewayMetaState gatewayMetaState = new MockGatewayMetaState(newLocalNode); + gatewayMetaState.start(Settings.EMPTY, nodeEnvironment, xContentRegistry()); delegate = gatewayMetaState.getPersistedState(); } else { nodeEnvironment = null; @@ -1174,6 +1172,10 @@ public class AbstractCoordinatorTestCase extends ESTestCase { private boolean isNotUsefullyBootstrapped() { return getLocalNode().isMasterNode() == false || coordinator.isInitialConfigurationSet() == false; } + + void allowClusterStateApplicationFailure() { + clusterApplierService.allowClusterStateApplicationFailure(); + } } private List provideSeedHosts(SeedHostsProvider.HostsResolver ignored) { @@ -1284,6 +1286,7 @@ public class AbstractCoordinatorTestCase extends ESTestCase { private final String nodeName; private final DeterministicTaskQueue deterministicTaskQueue; ClusterStateApplyResponse clusterStateApplyResponse = ClusterStateApplyResponse.SUCCEED; + private boolean applicationMayFail; DisruptableClusterApplierService(String nodeName, Settings settings, ClusterSettings clusterSettings, DeterministicTaskQueue deterministicTaskQueue, Function runnableWrapper) { @@ -1328,6 +1331,15 @@ public class AbstractCoordinatorTestCase extends ESTestCase { protected void connectToNodesAndWait(ClusterState newClusterState) { // don't do anything, and don't block } + + @Override + protected boolean applicationMayFail() { + return this.applicationMayFail; + } + + void allowClusterStateApplicationFailure() { + this.applicationMayFail = true; + } } protected DiscoveryNode createDiscoveryNode(int nodeIndex, boolean masterEligible) { diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 586303c23c4b..8a0fbe0a4958 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.util; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.SeedUtils; - import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BytesRef; @@ -29,7 +28,6 @@ import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.test.ESTestCase; import java.util.Collection; import java.util.Collections; @@ -41,6 +39,9 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import 
java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.test.ESTestCase.assertBusy; +import static org.junit.Assert.assertTrue; + public class MockBigArrays extends BigArrays { /** @@ -57,8 +58,9 @@ public class MockBigArrays extends BigArrays { // not empty, we might be executing on a shared cluster that keeps on obtaining // and releasing arrays, lets make sure that after a reasonable timeout, all master // copy (snapshot) have been released - boolean success = ESTestCase.awaitBusy(() -> Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_ARRAYS.keySet())); - if (!success) { + try { + assertBusy(() -> assertTrue(Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_ARRAYS.keySet()))); + } catch (AssertionError ex) { masterCopy.keySet().retainAll(ACQUIRED_ARRAYS.keySet()); ACQUIRED_ARRAYS.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on if (!masterCopy.isEmpty()) { diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockPageCacheRecycler.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockPageCacheRecycler.java index c20268889296..1bb432360313 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockPageCacheRecycler.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockPageCacheRecycler.java @@ -23,7 +23,6 @@ import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.common.recycler.Recycler.V; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.test.ESTestCase; import java.lang.reflect.Array; import java.util.Arrays; @@ -34,6 +33,8 @@ import java.util.Random; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import static org.elasticsearch.test.ESTestCase.waitUntil; + public class MockPageCacheRecycler extends PageCacheRecycler { private static final ConcurrentMap ACQUIRED_PAGES = new ConcurrentHashMap<>(); @@ -44,8 +45,8 @@ public class MockPageCacheRecycler extends PageCacheRecycler { // not empty, we might be executing on a shared cluster that keeps on obtaining // and releasing pages, lets make sure that after a reasonable timeout, all master // copy (snapshot) have been released - boolean success = - ESTestCase.awaitBusy(() -> Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet())); + final boolean success = + waitUntil(() -> Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet())); if (!success) { masterCopy.keySet().retainAll(ACQUIRED_PAGES.keySet()); ACQUIRED_PAGES.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on diff --git a/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java b/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java index 006f29488312..b73a90b42848 100644 --- a/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java +++ b/test/framework/src/main/java/org/elasticsearch/gateway/MockGatewayMetaState.java @@ -19,15 +19,21 @@ package org.elasticsearch.gateway; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.MetaDataUpgrader; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + /** * {@link GatewayMetaState} constructor accepts a lot of arguments. * It's not always easy / convenient to construct these dependencies. @@ -37,24 +43,29 @@ import org.elasticsearch.transport.TransportService; public class MockGatewayMetaState extends GatewayMetaState { private final DiscoveryNode localNode; - public MockGatewayMetaState(Settings settings, NodeEnvironment nodeEnvironment, - NamedXContentRegistry xContentRegistry, DiscoveryNode localNode) { - super(settings, new MetaStateService(nodeEnvironment, xContentRegistry)); + public MockGatewayMetaState(DiscoveryNode localNode) { this.localNode = localNode; } @Override - protected void upgradeMetaData(MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) { + void upgradeMetaData(Settings settings, MetaStateService metaStateService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, + MetaDataUpgrader metaDataUpgrader) { // MetaData upgrade is tested in GatewayMetaStateTests, we override this method to NOP to make mocking easier } @Override - public void applyClusterStateUpdaters(TransportService transportService, ClusterService clusterService) { + ClusterState prepareInitialClusterState(TransportService transportService, ClusterService clusterService, ClusterState clusterState) { // Just set localNode here, not to mess with ClusterService and IndicesService mocking - previousClusterState = ClusterStateUpdaters.setLocalNode(previousClusterState, localNode); + return ClusterStateUpdaters.setLocalNode(clusterState, localNode); } - public void start() { - start(null, null, null, null); + public void start(Settings settings, NodeEnvironment nodeEnvironment, NamedXContentRegistry xContentRegistry) { + final TransportService transportService = mock(TransportService.class); + when(transportService.getThreadPool()).thenReturn(mock(ThreadPool.class)); + final ClusterService clusterService = mock(ClusterService.class); + when(clusterService.getClusterSettings()) + .thenReturn(new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + start(settings, transportService, clusterService, new MetaStateService(nodeEnvironment, xContentRegistry), + null, null); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index cce9780b0922..35da786474aa 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.shard; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexNotFoundException; import org.apache.lucene.store.Directory; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.flush.FlushRequest; @@ -695,20 +694,6 @@ public abstract class IndexShardTestCase extends ESTestCase { inSyncIds, newRoutingTable); } - private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) throws IOException { - Store.MetadataSnapshot result; - try { - result = replica.snapshotStoreMetadata(); 
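/* getMetadataSnapshotOrEmpty is deleted outright here; presumably it lost its
   last caller through the recovery changes elsewhere in this patch (an
   assumption: the diff itself does not say why). */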
- } catch (IndexNotFoundException e) { - // OK! - result = Store.MetadataSnapshot.EMPTY; - } catch (IOException e) { - logger.warn("failed read store, treating as empty", e); - result = Store.MetadataSnapshot.EMPTY; - } - return result; - } - public static Set getShardDocUIDs(final IndexShard shard) throws IOException { return getDocIdAndSeqNos(shard).stream().map(DocIdSeqNoAndSource::getId).collect(Collectors.toSet()); } @@ -827,25 +812,30 @@ public abstract class IndexShardTestCase extends ESTestCase { shard.recoveryState()); } - /** Snapshot a shard using a given repository **/ - protected void snapshotShard(final IndexShard shard, - final Snapshot snapshot, - final Repository repository) throws IOException { - final IndexShardSnapshotStatus snapshotStatus = IndexShardSnapshotStatus.newInitializing(); - final PlainActionFuture future = PlainActionFuture.newFuture(); + /** + * Snapshot a shard using a given repository. + * + * @return new shard generation + */ + protected String snapshotShard(final IndexShard shard, + final Snapshot snapshot, + final Repository repository) throws IOException { + final Index index = shard.shardId().getIndex(); + final IndexId indexId = new IndexId(index.getName(), index.getUUID()); + final IndexShardSnapshotStatus snapshotStatus = IndexShardSnapshotStatus.newInitializing(null); + final PlainActionFuture future = PlainActionFuture.newFuture(); + final String shardGen; try (Engine.IndexCommitRef indexCommitRef = shard.acquireLastIndexCommit(true)) { - Index index = shard.shardId().getIndex(); - IndexId indexId = new IndexId(index.getName(), index.getUUID()); - repository.snapshotShard(shard.store(), shard.mapperService(), snapshot.getSnapshotId(), indexId, indexCommitRef.getIndexCommit(), snapshotStatus, future); - future.actionGet(); + shardGen = future.actionGet(); } final IndexShardSnapshotStatus.Copy lastSnapshotStatus = snapshotStatus.asCopy(); assertEquals(IndexShardSnapshotStatus.Stage.DONE, lastSnapshotStatus.getStage()); assertEquals(shard.snapshotStoreMetadata().size(), lastSnapshotStatus.getTotalFileCount()); assertNull(lastSnapshotStatus.getFailure()); + return shardGen; } /** diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java index 417e4e98649a..d68004eff4ad 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java @@ -135,7 +135,7 @@ public abstract class RestoreOnlyRepository extends AbstractLifecycleComponent i @Override public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, - IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { + IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 304a3963ff4e..3839f67373cf 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -34,7 +34,6 @@ import java.util.Map; import java.util.Objects; import java.util.Set; import 
java.util.TreeSet; -import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -51,18 +50,7 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase { private static final Pattern UNDERSCORE_THEN_ANYTHING = Pattern.compile("_(.)"); - private static String toCamelCase(String s) { - Matcher m = UNDERSCORE_THEN_ANYTHING.matcher(s); - StringBuffer sb = new StringBuffer(); - while (m.find()) { - m.appendReplacement(sb, m.group(1).toUpperCase(Locale.ROOT)); - } - m.appendTail(sb); - sb.setCharAt(0, Character.toUpperCase(sb.charAt(0))); - return sb.toString(); - } - - static final Map> KNOWN_TOKENIZERS = Map.ofEntries( + private static final Map> KNOWN_TOKENIZERS = Map.ofEntries( // exposed in ES entry("classic", MovedToAnalysisCommon.class), entry("edgengram", MovedToAnalysisCommon.class), diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index 21880d136838..e47bdeee3c22 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -165,7 +165,7 @@ public abstract class ESMockAPIBasedRepositoryIntegTestCase extends ESBlobStoreR } } - private void handleAsError(final HttpExchange exchange) throws IOException { + protected void handleAsError(final HttpExchange exchange) throws IOException { Streams.readFully(exchange.getRequestBody()); exchange.sendResponseHeaders(HttpStatus.SC_INTERNAL_SERVER_ERROR, -1); exchange.close(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index 69cfc1f6dc0c..28e058947d4c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -49,7 +49,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; -import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; @@ -74,7 +73,6 @@ import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.search.internal.SearchContext; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -232,22 +230,6 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { serviceHolderWithNoType.clientInvocationHandler.delegate = this; } - protected static SearchContext getSearchContext(QueryShardContext context) { - TestSearchContext testSearchContext = new TestSearchContext(context) { - @Override - public MapperService mapperService() { - return serviceHolder.mapperService; // need to build / parse inner hits sort fields - } - - @Override - public > IFD getForField(MappedFieldType fieldType) { - return serviceHolder.indexFieldDataService.getForField(fieldType); // need 
to build / parse inner hits sort fields - } - - }; - return testSearchContext; - } - @After public void afterTest() { serviceHolder.clientInvocationHandler.delegate = null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 29c3fc5a27b6..2fd1bf450f51 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -54,7 +54,6 @@ import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.query.support.QueryParsers; -import org.elasticsearch.search.internal.SearchContext; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -422,14 +421,13 @@ public abstract class AbstractQueryTestCase> context.setAllowUnmappedFields(true); QB firstQuery = createTestQueryBuilder(); QB controlQuery = copyQuery(firstQuery); - SearchContext searchContext = getSearchContext(context); /* we use a private rewrite context here since we want the most realistic way of asserting that we are cacheable or not. * We do it this way in SearchService where * we first rewrite the query with a private context, then reset the context and then build the actual lucene query*/ QueryBuilder rewritten = rewriteQuery(firstQuery, new QueryShardContext(context)); Query firstLuceneQuery = rewritten.toQuery(context); assertNotNull("toQuery should not return null", firstLuceneQuery); - assertLuceneQuery(firstQuery, firstLuceneQuery, searchContext); + assertLuceneQuery(firstQuery, firstLuceneQuery, context); //remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well assertTrue( "query is not equal to its copy after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, @@ -445,10 +443,10 @@ public abstract class AbstractQueryTestCase> secondQuery.queryName(secondQuery.queryName() == null ? randomAlphaOfLengthBetween(1, 30) : secondQuery.queryName() + randomAlphaOfLengthBetween(1, 10)); } - searchContext = getSearchContext(context); + context = new QueryShardContext(context); Query secondLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); assertNotNull("toQuery should not return null", secondLuceneQuery); - assertLuceneQuery(secondQuery, secondLuceneQuery, searchContext); + assertLuceneQuery(secondQuery, secondLuceneQuery, context); if (builderGeneratesCacheableQueries()) { assertEquals("two equivalent query builders lead to different lucene queries", @@ -494,11 +492,11 @@ public abstract class AbstractQueryTestCase> /** * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder} * and {@link QueryShardContext}. Verifies that named queries and boost are properly handled and delegates to - * {@link #doAssertLuceneQuery(AbstractQueryBuilder, Query, SearchContext)} for query specific checks. + * {@link #doAssertLuceneQuery(AbstractQueryBuilder, Query, QueryShardContext)} for query specific checks. 
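     * Note: assertions that previously went through a SearchContext now use the
     * QueryShardContext directly; for example, the body below calls
     * context.copyNamedQueries() where it used to call
     * context.getQueryShardContext().copyNamedQueries().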
*/ - private void assertLuceneQuery(QB queryBuilder, Query query, SearchContext context) throws IOException { + private void assertLuceneQuery(QB queryBuilder, Query query, QueryShardContext context) throws IOException { if (queryBuilder.queryName() != null) { - Query namedQuery = context.getQueryShardContext().copyNamedQueries().get(queryBuilder.queryName()); + Query namedQuery = context.copyNamedQueries().get(queryBuilder.queryName()); assertThat(namedQuery, equalTo(query)); } if (query != null) { @@ -522,7 +520,7 @@ public abstract class AbstractQueryTestCase> * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder} * and {@link QueryShardContext}. Contains the query specific checks to be implemented by subclasses. */ - protected abstract void doAssertLuceneQuery(QB queryBuilder, Query query, SearchContext context) throws IOException; + protected abstract void doAssertLuceneQuery(QB queryBuilder, Query query, QueryShardContext context) throws IOException; protected void assertTermOrBoostQuery(Query query, String field, String value, float fieldBoost) { if (fieldBoost != AbstractQueryBuilder.DEFAULT_BOOST) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index d46c09e12621..e1fa74c5b317 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -1,4 +1,4 @@ -package org.elasticsearch.test;/* +/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright @@ -17,6 +17,8 @@ package org.elasticsearch.test;/* * under the License. */ +package org.elasticsearch.test; + import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomStrings; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 1ee607aaa3be..2109c9d3eff7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -164,8 +164,6 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.BooleanSupplier; import java.util.function.Function; import java.util.stream.Collectors; @@ -186,6 +184,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTi import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.startsWith; @@ -798,8 +797,7 @@ public abstract class ESIntegTestCase extends ESTestCase { sb.append(value).append("] results. 
expected [").append(expectedResults).append("]"); String failMsg = sb.toString(); for (SearchHit hit : searchResponse.getHits().getHits()) { - sb.append("\n-> _index: [").append(hit.getIndex()).append("] type [").append(hit.getType()) - .append("] id [").append(hit.getId()).append("]"); + sb.append("\n-> _index: [").append(hit.getIndex()).append("] id [").append(hit.getId()).append("]"); } logger.warn("{}", sb); fail(failMsg); @@ -930,67 +928,50 @@ public abstract class ESIntegTestCase extends ESTestCase { * Waits until at least a give number of document is visible for searchers * * @param numDocs number of documents to wait for - * @param indexer a {@link org.elasticsearch.test.BackgroundIndexer}. If supplied it will be first checked for documents indexed. + * @param indexer a {@link org.elasticsearch.test.BackgroundIndexer}. It will be first checked for documents indexed. * This saves on unneeded searches. - * @return the actual number of docs seen. */ - public long waitForDocs(final long numDocs, @Nullable final BackgroundIndexer indexer) throws InterruptedException { + public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) throws Exception { // indexing threads can wait for up to ~1m before retrying when they first try to index into a shard which is not STARTED. - return waitForDocs(numDocs, 90, TimeUnit.SECONDS, indexer); - } + final long maxWaitTimeMs = Math.max(90 * 1000, 200 * numDocs); - /** - * Waits until at least a give number of document is visible for searchers - * - * @param numDocs number of documents to wait for - * @param maxWaitTime if not progress have been made during this time, fail the test - * @param maxWaitTimeUnit the unit in which maxWaitTime is specified - * @param indexer If supplied it will be first checked for documents indexed. - * This saves on unneeded searches. - * @return the actual number of docs seen. - */ - public long waitForDocs(final long numDocs, int maxWaitTime, TimeUnit maxWaitTimeUnit, @Nullable final BackgroundIndexer indexer) - throws InterruptedException { - final AtomicLong lastKnownCount = new AtomicLong(-1); - long lastStartCount = -1; - BooleanSupplier testDocs = () -> { - if (indexer != null) { - lastKnownCount.set(indexer.totalIndexedDocs()); - } - if (lastKnownCount.get() >= numDocs) { - try { + assertBusy( + () -> { + long lastKnownCount = indexer.totalIndexedDocs(); - long count = client().prepareSearch() - .setTrackTotalHits(true) - .setSize(0) - .setQuery(matchAllQuery()) - .get() - .getHits().getTotalHits().value; + if (lastKnownCount >= numDocs) { + try { + long count = client().prepareSearch() + .setTrackTotalHits(true) + .setSize(0) + .setQuery(matchAllQuery()) + .get() + .getHits().getTotalHits().value; - if (count == lastKnownCount.get()) { - // no progress - try to refresh for the next time - client().admin().indices().prepareRefresh().get(); + if (count == lastKnownCount) { + // no progress - try to refresh for the next time + client().admin().indices().prepareRefresh().get(); + } + lastKnownCount = count; + } catch (Exception e) { // count now acts like search and barfs if all shards failed... + logger.debug("failed to executed count", e); + throw e; } - lastKnownCount.set(count); - } catch (Exception e) { // count now acts like search and barfs if all shards failed... - logger.debug("failed to executed count", e); - return false; } - logger.debug("[{}] docs visible for search. waiting for [{}]", lastKnownCount.get(), numDocs); - } else { - logger.debug("[{}] docs indexed. 
waiting for [{}]", lastKnownCount.get(), numDocs); - } - return lastKnownCount.get() >= numDocs; - }; - while (!awaitBusy(testDocs, maxWaitTime, maxWaitTimeUnit)) { - if (lastStartCount == lastKnownCount.get()) { - // we didn't make any progress - fail("failed to reach " + numDocs + "docs"); - } - lastStartCount = lastKnownCount.get(); - } - return lastKnownCount.get(); + if (logger.isDebugEnabled()) { + if (lastKnownCount < numDocs) { + logger.debug("[{}] docs indexed. waiting for [{}]", lastKnownCount, numDocs); + } else { + logger.debug("[{}] docs visible for search (needed [{}])", lastKnownCount, numDocs); + } + } + + assertThat(lastKnownCount, greaterThanOrEqualTo(numDocs)); + }, + maxWaitTimeMs, + TimeUnit.MILLISECONDS + ); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 857c32426c64..5a50abd3a6fc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -845,6 +845,7 @@ public abstract class ESTestCase extends LuceneTestCase { */ public static void assertBusy(CheckedRunnable codeBlock, long maxWaitTime, TimeUnit unit) throws Exception { long maxTimeInMillis = TimeUnit.MILLISECONDS.convert(maxWaitTime, unit); + // In case you've forgotten your high-school studies, log10(x) / log10(y) == log y(x) long iterations = Math.max(Math.round(Math.log10(maxTimeInMillis) / Math.log10(2)), 1); long timeInMillis = 1; long sum = 0; @@ -872,14 +873,34 @@ public abstract class ESTestCase extends LuceneTestCase { } } - public static boolean awaitBusy(BooleanSupplier breakSupplier) throws InterruptedException { - return awaitBusy(breakSupplier, 10, TimeUnit.SECONDS); + /** + * Periodically execute the supplied function until it returns true, or a timeout + * is reached. This version uses a timeout of 10 seconds. If at all possible, + * use {@link ESTestCase#assertBusy(CheckedRunnable)} instead. + * + * @param breakSupplier determines whether to return immediately or continue waiting. + * @return the last value returned by breakSupplier + * @throws InterruptedException if any sleep calls were interrupted. + */ + public static boolean waitUntil(BooleanSupplier breakSupplier) throws InterruptedException { + return waitUntil(breakSupplier, 10, TimeUnit.SECONDS); } // After 1s, we stop growing the sleep interval exponentially and just sleep 1s until maxWaitTime private static final long AWAIT_BUSY_THRESHOLD = 1000L; - public static boolean awaitBusy(BooleanSupplier breakSupplier, long maxWaitTime, TimeUnit unit) throws InterruptedException { + /** + * Periodically execute the supplied function until it returns true, or until the + * specified maximum wait time has elapsed. If at all possible, use + * {@link ESTestCase#assertBusy(CheckedRunnable)} instead. + * + * @param breakSupplier determines whether to return immediately or continue waiting. + * @param maxWaitTime the maximum amount of time to wait + * @param unit the unit of tie for maxWaitTime + * @return the last value returned by breakSupplier + * @throws InterruptedException if any sleep calls were interrupted. 
+ */ + public static boolean waitUntil(BooleanSupplier breakSupplier, long maxWaitTime, TimeUnit unit) throws InterruptedException { long maxTimeInMillis = TimeUnit.MILLISECONDS.convert(maxWaitTime, unit); long timeInMillis = 1; long sum = 0; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 8410c96bbae6..d0fd121f3cb8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -146,11 +146,11 @@ import static org.elasticsearch.discovery.DiscoveryModule.ZEN2_DISCOVERY_TYPE; import static org.elasticsearch.discovery.FileBasedSeedHostsProvider.UNICAST_HOSTS_FILE; import static org.elasticsearch.node.Node.INITIAL_STATE_TIMEOUT_SETTING; import static org.elasticsearch.test.ESTestCase.assertBusy; -import static org.elasticsearch.test.ESTestCase.awaitBusy; import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.junit.Assert.assertEquals; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertFalse; @@ -1105,22 +1105,17 @@ public final class InternalTestCluster extends TestCluster { logger.trace("validating cluster formed via [{}], expecting {}", viaNode, expectedNodes); final Client client = client(viaNode); try { - if (awaitBusy(() -> { + assertBusy(() -> { DiscoveryNodes discoveryNodes = client.admin().cluster().prepareState().get().getState().nodes(); - if (discoveryNodes.getSize() != expectedNodes.size()) { - return false; - } + assertEquals(expectedNodes.size(), discoveryNodes.getSize()); for (DiscoveryNode expectedNode : expectedNodes) { - if (discoveryNodes.nodeExists(expectedNode) == false) { - return false; - } + assertTrue("Expected node to exist: " + expectedNode, discoveryNodes.nodeExists(expectedNode)); } - return true; - }, 30, TimeUnit.SECONDS) == false) { - throw new IllegalStateException("cluster failed to form with expected nodes " + expectedNodes + " and actual nodes " + - client.admin().cluster().prepareState().get().getState().nodes()); - } - } catch (InterruptedException e) { + }, 30, TimeUnit.SECONDS); + } catch (AssertionError ae) { + throw new IllegalStateException("cluster failed to form with expected nodes " + expectedNodes + " and actual nodes " + + client.admin().cluster().prepareState().get().getState().nodes()); + } catch (Exception e) { throw new IllegalStateException(e); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 97b7de893ba1..6cd451d6e405 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -21,6 +21,7 @@ package org.elasticsearch.test; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Query; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.unit.TimeValue; @@ -35,6 +36,7 @@ import org.elasticsearch.index.query.InnerHitContextBuilder; import 
org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.SearchShardTarget; @@ -64,6 +66,8 @@ import java.util.List; import java.util.Map; public class TestSearchContext extends SearchContext { + public static final SearchShardTarget SHARD_TARGET = + new SearchShardTarget("test", new ShardId("test", "test", 0), null, OriginalIndices.NONE); final BigArrays bigArrays; final IndexService indexService; diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 96872c6710d9..c3a8be255efc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -221,8 +221,8 @@ public class ElasticsearchAssertions { Set idsSet = new HashSet<>(Arrays.asList(ids)); for (SearchHit hit : searchResponse.getHits()) { assertThat( - "id [" + hit.getId() + "] was found in search results but wasn't expected (type [" + hit.getType() - + "], index [" + hit.getIndex() + "])" + shardStatus, idsSet.remove(hit.getId()), + "id [" + hit.getId() + "] was found in search results but wasn't expected (index [" + + hit.getIndex() + "])" + shardStatus, idsSet.remove(hit.getId()), equalTo(true)); } assertThat("Some expected ids were not found in search results: " + Arrays.toString(idsSet.toArray(new String[idsSet.size()])) + "." @@ -458,10 +458,6 @@ public class ElasticsearchAssertions { return new ElasticsearchMatchers.SearchHitHasIdMatcher(id); } - public static Matcher hasType(final String type) { - return new ElasticsearchMatchers.SearchHitHasTypeMatcher(type); - } - public static Matcher hasIndex(final String index) { return new ElasticsearchMatchers.SearchHitHasIndexMatcher(index); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java index 333205864810..f1453a3688c5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java @@ -52,29 +52,6 @@ public class ElasticsearchMatchers { } } - public static class SearchHitHasTypeMatcher extends TypeSafeMatcher { - private String type; - - public SearchHitHasTypeMatcher(String type) { - this.type = type; - } - - @Override - public boolean matchesSafely(final SearchHit searchHit) { - return searchHit.getType().equals(type); - } - - @Override - public void describeMismatchSafely(final SearchHit searchHit, final Description mismatchDescription) { - mismatchDescription.appendText(" was ").appendValue(searchHit.getType()); - } - - @Override - public void describeTo(final Description description) { - description.appendText("searchHit type should be ").appendValue(type); - } - } - public static class SearchHitHasIndexMatcher extends TypeSafeMatcher { private String index; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java 
b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java index 70665ad5d9ba..f0d1b13d98d7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java @@ -18,6 +18,12 @@ */ package org.elasticsearch.test.rest.yaml.restspec; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; + import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; @@ -30,12 +36,6 @@ import java.util.Map; import java.util.Set; import java.util.stream.Stream; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; - /** * Holds the specification used to turn {@code do} actions in the YAML suite into REST api calls. */ @@ -43,7 +43,7 @@ public class ClientYamlSuiteRestSpec { private final Set globalParameters = new HashSet<>(); private final Map restApiMap = new HashMap<>(); - private ClientYamlSuiteRestSpec() {} + ClientYamlSuiteRestSpec() {} private void addApi(ClientYamlSuiteRestApi restApi) { ClientYamlSuiteRestApi previous = restApiMap.putIfAbsent(restApi.getName(), restApi); @@ -99,27 +99,7 @@ public class ClientYamlSuiteRestSpec { JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { String filename = jsonFile.getFileName().toString(); if (filename.equals("_common.json")) { - String currentFieldName = null; - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && "params".equals(currentFieldName)) { - while (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - String param = parser.currentName(); - if (restSpec.globalParameters.contains(param)) { - throw new IllegalArgumentException("Found duplicate global param [" + param + "]"); - } - restSpec.globalParameters.add(param); - parser.nextToken(); - if (parser.currentToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Expected params field in rest api definition to " + - "contain an object"); - } - parser.skipChildren(); - } - } - } + parseCommonSpec(parser, restSpec); } else { ClientYamlSuiteRestApi restApi = restApiParser.parse(jsonFile.toString(), parser); String expectedApiName = filename.substring(0, filename.lastIndexOf('.')); @@ -134,4 +114,34 @@ public class ClientYamlSuiteRestSpec { throw new UncheckedIOException("Can't parse rest spec file: [" + jsonFile + "]", ex); } } + + static void parseCommonSpec(XContentParser parser, ClientYamlSuiteRestSpec restSpec) throws IOException { + String currentFieldName = null; + parser.nextToken(); + assert parser.currentToken() == XContentParser.Token.START_OBJECT; + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + currentFieldName = 
parser.currentName(); + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + if ("params".equals(currentFieldName)) { + while (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + String param = parser.currentName(); + if (restSpec.globalParameters.contains(param)) { + throw new IllegalArgumentException("Found duplicate global param [" + param + "]"); + } + restSpec.globalParameters.add(param); + parser.nextToken(); + if (parser.currentToken() != XContentParser.Token.START_OBJECT) { + throw new IllegalArgumentException("Expected params field in rest api definition to " + + "contain an object"); + } + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + } + + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java index 87e1f2533622..cde88bdccddb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransport.java @@ -81,7 +81,7 @@ public class MockTransport implements Transport, LifecycleComponent { @Nullable ClusterSettings clusterSettings, Set taskHeaders) { StubbableConnectionManager connectionManager = new StubbableConnectionManager(new ConnectionManager(settings, this), settings, this); - connectionManager.setDefaultNodeConnectedBehavior(cm -> Collections.emptySet()); + connectionManager.setDefaultNodeConnectedBehavior((cm, node) -> false); connectionManager.setDefaultGetConnectionBehavior((cm, discoveryNode) -> createConnection(discoveryNode)); return new TransportService(settings, this, threadPool, interceptor, localNodeFactory, clusterSettings, taskHeaders, connectionManager); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 3ecb76b0dee5..cb509dfbf9d7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -463,7 +463,7 @@ public final class MockTransportService extends TransportService { * @return {@code true} if no other get connection behavior was registered for this address before. 
*/ public boolean addGetConnectionBehavior(TransportAddress transportAddress, StubbableConnectionManager.GetConnectionBehavior behavior) { - return connectionManager().addConnectBehavior(transportAddress, behavior); + return connectionManager().addGetConnectionBehavior(transportAddress, behavior); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java index 8f07bc19d0b1..ea6e145d882a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/StubbableConnectionManager.java @@ -28,7 +28,6 @@ import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportConnectionListener; -import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -37,7 +36,7 @@ public class StubbableConnectionManager extends ConnectionManager { private final ConnectionManager delegate; private final ConcurrentMap getConnectionBehaviors; private volatile GetConnectionBehavior defaultGetConnectionBehavior = ConnectionManager::getConnection; - private volatile NodeConnectedBehavior defaultNodeConnectedBehavior = ConnectionManager::connectedNodes; + private volatile NodeConnectedBehavior defaultNodeConnectedBehavior = ConnectionManager::nodeConnected; public StubbableConnectionManager(ConnectionManager delegate, Settings settings, Transport transport) { super(settings, transport); @@ -45,7 +44,7 @@ public class StubbableConnectionManager extends ConnectionManager { this.getConnectionBehaviors = new ConcurrentHashMap<>(); } - public boolean addConnectBehavior(TransportAddress transportAddress, GetConnectionBehavior connectBehavior) { + public boolean addGetConnectionBehavior(TransportAddress transportAddress, GetConnectionBehavior connectBehavior) { return getConnectionBehaviors.put(transportAddress, connectBehavior) == null; } @@ -64,7 +63,6 @@ public class StubbableConnectionManager extends ConnectionManager { public void clearBehaviors() { defaultGetConnectionBehavior = ConnectionManager::getConnection; getConnectionBehaviors.clear(); - defaultNodeConnectedBehavior = ConnectionManager::connectedNodes; } public void clearBehavior(TransportAddress transportAddress) { @@ -85,12 +83,7 @@ public class StubbableConnectionManager extends ConnectionManager { @Override public boolean nodeConnected(DiscoveryNode node) { - return defaultNodeConnectedBehavior.connectedNodes(delegate).contains(node); - } - - @Override - public Set connectedNodes() { - return defaultNodeConnectedBehavior.connectedNodes(delegate); + return defaultNodeConnectedBehavior.connectedNodes(delegate, node); } @Override @@ -132,6 +125,6 @@ public class StubbableConnectionManager extends ConnectionManager { @FunctionalInterface public interface NodeConnectedBehavior { - Set connectedNodes(ConnectionManager connectionManager); + boolean connectedNodes(ConnectionManager connectionManager, DiscoveryNode node); } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 07d01fa452a9..0911c4dc0fa2 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -162,12 +162,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { final CountDownLatch latch = new CountDownLatch(2); TransportConnectionListener waitForConnection = new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { latch.countDown(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { fail("disconnect should not be called " + node); } }; @@ -684,12 +684,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { final CountDownLatch latch = new CountDownLatch(1); TransportConnectionListener disconnectListener = new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { fail("node connected should not be called, all connection have been done previously, node: " + node); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { latch.countDown(); } }; @@ -1726,12 +1726,12 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { final CountDownLatch latch = new CountDownLatch(4); TransportConnectionListener waitForConnection = new TransportConnectionListener() { @Override - public void onNodeConnected(DiscoveryNode node) { + public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) { latch.countDown(); } @Override - public void onNodeDisconnected(DiscoveryNode node) { + public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) { fail("disconnect should not be called " + node); } }; diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java index 5012275624c6..01c0262411e5 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestApiTests.java @@ -29,6 +29,18 @@ import java.util.Set; public class ClientYamlSuiteRestApiTests extends ESTestCase { + public void testParseCommonSpec() throws IOException { + XContentParser parser = createParser(YamlXContent.yamlXContent, COMMON_SPEC); + ClientYamlSuiteRestSpec restSpec = new ClientYamlSuiteRestSpec(); + ClientYamlSuiteRestSpec.parseCommonSpec(parser, restSpec); + assertTrue(restSpec.isGlobalParameter("pretty")); + assertTrue(restSpec.isGlobalParameter("human")); + assertTrue(restSpec.isGlobalParameter("error_trace")); + assertTrue(restSpec.isGlobalParameter("source")); + assertTrue(restSpec.isGlobalParameter("filter_path")); + assertFalse(restSpec.isGlobalParameter("unknown")); + } + public void testPathMatching() throws IOException { XContentParser parser = createParser(YamlXContent.yamlXContent, REST_SPEC_API); ClientYamlSuiteRestApi restApi = new ClientYamlSuiteRestApiParser().parse("index.json", parser); @@ -66,6 +78,39 @@ public class ClientYamlSuiteRestApiTests extends ESTestCase { } } + private static final String COMMON_SPEC = "{\n"+ + " \"documentation\" : {\n"+ + 
" \"url\": \"Parameters that are accepted by all API endpoints.\",\n"+ + " \"documentation\": \"https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html\"\n"+ + " },\n"+ + " \"params\": {\n"+ + " \"pretty\": {\n"+ + " \"type\": \"boolean\",\n"+ + " \"description\": \"Pretty format the returned JSON response.\",\n"+ + " \"default\": false\n"+ + " },\n"+ + " \"human\": {\n"+ + " \"type\": \"boolean\",\n"+ + " \"description\": \"Return human readable values for statistics.\",\n"+ + " \"default\": true\n"+ + " },\n"+ + " \"error_trace\": {\n"+ + " \"type\": \"boolean\",\n"+ + " \"description\": \"Include the stack trace of returned errors.\",\n"+ + " \"default\": false\n"+ + " },\n"+ + " \"source\": {\n"+ + " \"type\": \"string\",\n"+ + " \"description\": \"The URL-encoded request definition." + + " Useful for libraries that do not accept a request body for non-POST requests.\"\n"+ + " },\n"+ + " \"filter_path\": {\n"+ + " \"type\": \"list\",\n"+ + " \"description\": \"A comma-separated list of filters used to reduce the response.\"\n"+ + " }\n"+ + " }\n"+ + "}\n"; + private static final String REST_SPEC_API = "{\n" + " \"index\":{\n" + " \"documentation\":{\n" + diff --git a/x-pack/docs/en/rest-api/security/create-api-keys.asciidoc b/x-pack/docs/en/rest-api/security/create-api-keys.asciidoc index 42541d0f3633..c0df9290719d 100644 --- a/x-pack/docs/en/rest-api/security/create-api-keys.asciidoc +++ b/x-pack/docs/en/rest-api/security/create-api-keys.asciidoc @@ -49,11 +49,12 @@ The following parameters can be specified in the body of a POST or PUT request: `role_descriptors`:: (Optional, array-of-role-descriptor) An array of role descriptors for this API key. This parameter is optional. When it is not specified or is an empty array, -then the API key will have the permissions of the authenticated user. If you -supply role descriptors, they must be a subset of the authenticated user's -permissions. The structure of role descriptor is the same as the request for -create role API. For more details, see -<>. +then the API key will have a _point in time snapshot of permissions of the +authenticated user_. If you supply role descriptors then the resultant permissions +would be an intersection of API keys permissions and authenticated user's permissions +thereby limiting the access scope for API keys. +The structure of role descriptor is the same as the request for create role API. +For more details, see <>. `expiration`:: (Optional, string) Expiration time for the API key. By default, API keys never diff --git a/x-pack/docs/en/rest-api/security/create-roles.asciidoc b/x-pack/docs/en/rest-api/security/create-roles.asciidoc index 7eda4c22b0dc..19802234f329 100644 --- a/x-pack/docs/en/rest-api/security/create-roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/create-roles.asciidoc @@ -24,10 +24,9 @@ privilege. [[security-api-put-role-desc]] ==== {api-description-title} -The role API is generally the preferred way to manage roles, rather than using -file-based role management. For more information about the native realm, see -{stack-ov}/realms.html[Realms] and <>. - +The role management APIs are generally the preferred way to manage roles, rather than using +{stack-ov}/defining-roles.html#roles-management-file[file-based role management]. The create +or update roles API cannot update roles that are defined in roles files. 
[[security-api-put-role-path-params]] ==== {api-path-parms-title} diff --git a/x-pack/docs/en/rest-api/security/delete-roles.asciidoc b/x-pack/docs/en/rest-api/security/delete-roles.asciidoc index dec674b65776..ce5906ad8e32 100644 --- a/x-pack/docs/en/rest-api/security/delete-roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/delete-roles.asciidoc @@ -22,10 +22,8 @@ Removes roles in the native realm. [[security-api-delete-role-desc]] ==== {api-description-title} -The Roles API is generally the preferred way to manage roles, rather than using -file-based role management. For more information about the native realm, see -{stack-ov}/realms.html[Realms] and <>. - +The role management APIs are generally the preferred way to manage roles, rather than using +{stack-ov}/defining-roles.html#roles-management-file[file-based role management]. The delete roles API cannot remove roles that are defined in roles files. [[security-api-delete-role-path-params]] ==== {api-path-parms-title} diff --git a/x-pack/docs/en/rest-api/security/get-roles.asciidoc b/x-pack/docs/en/rest-api/security/get-roles.asciidoc index f014166362ee..de7234697d33 100644 --- a/x-pack/docs/en/rest-api/security/get-roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/get-roles.asciidoc @@ -23,8 +23,9 @@ privilege. [[security-api-get-role-desc]] ==== {api-description-title} -For more information about the native realm, see -{stack-ov}/realms.html[Realms] and <>. +The role management APIs are generally the preferred way to manage roles, rather than using +{stack-ov}/defining-roles.html#roles-management-file[file-based role management]. The get roles +API cannot retrieve roles that are defined in roles files. [[security-api-get-role-path-params]] ==== {api-path-parms-title} diff --git a/x-pack/docs/en/rest-api/security/role-mapping-resources.asciidoc b/x-pack/docs/en/rest-api/security/role-mapping-resources.asciidoc index ad90bef063a8..13e6457d7ef3 100644 --- a/x-pack/docs/en/rest-api/security/role-mapping-resources.asciidoc +++ b/x-pack/docs/en/rest-api/security/role-mapping-resources.asciidoc @@ -26,7 +26,7 @@ A rule is a logical condition that is expressed by using a JSON DSL. The DSL sup (array of rules) If *all* of its children are true, it evaluates to `true`. `field`::: (object) See <>. -`except`:: +`except`::: (object) A single rule as an object. Only valid as a child of an `all` rule. If its child is `false`, the `except` is `true`. diff --git a/x-pack/docs/en/security/authorization/managing-roles.asciidoc b/x-pack/docs/en/security/authorization/managing-roles.asciidoc index ee984296f08c..eab8e7f573b4 100644 --- a/x-pack/docs/en/security/authorization/managing-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/managing-roles.asciidoc @@ -214,7 +214,16 @@ _Role Management APIs_, the role found in the file will be used. While the _Role Management APIs_ is the preferred mechanism to define roles, using the `roles.yml` file becomes useful if you want to define fixed roles that no one (beside an administrator having physical access to the {es} nodes) -would be able to change. +would be able to change. Note, however, that the `roles.yml` file is provided as a +minimal administrative function and is not intended to cover all use cases +for defining roles. + +[IMPORTANT] +============================== +You cannot view, edit, or remove any roles that are defined in `roles.yml` by +using the <> or the +<>. 
+============================== [IMPORTANT] ============================== diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java index 1d7aa8a6ffc4..705ff986b135 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityAggregatorTests.java @@ -226,32 +226,4 @@ public class CumulativeCardinalityAggregatorTests extends AggregatorTestCase { private static long asLong(String dateTime) { return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli(); } - - - private static AggregatorFactory getRandomSequentiallyOrderedParentAgg() throws IOException { - AggregatorFactory factory; - ValuesSourceConfig valuesSource = new ValuesSourceConfig<>(ValuesSourceType.NUMERIC); - ValuesSourceConfig numericVS = new ValuesSourceConfig<>(ValuesSourceType.NUMERIC); - switch (randomIntBetween(0, 2)) { - case 0: - factory = new HistogramAggregatorFactory("name", valuesSource, 0.0d, 0.0d, - mock(InternalOrder.class), false, 0L, 0.0d, 1.0d, mock(QueryShardContext.class), null, - new AggregatorFactories.Builder(), Collections.emptyMap()); - break; - case 1: - factory = new DateHistogramAggregatorFactory("name", valuesSource, 0L, - mock(InternalOrder.class), false, 0L, mock(Rounding.class), mock(Rounding.class), - mock(ExtendedBounds.class), mock(QueryShardContext.class), mock(AggregatorFactory.class), - new AggregatorFactories.Builder(), Collections.emptyMap()); - break; - case 2: - default: - AutoDateHistogramAggregationBuilder.RoundingInfo[] roundings = new AutoDateHistogramAggregationBuilder.RoundingInfo[1]; - factory = new AutoDateHistogramAggregatorFactory("name", numericVS, - 1, roundings, - mock(QueryShardContext.class), null, new AggregatorFactories.Builder(), Collections.emptyMap()); - } - - return factory; - } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index 293fc04989f9..ddf977cd6a70 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -296,7 +296,7 @@ public class CcrRepository extends AbstractLifecycleComponent implements Reposit @Override public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, - IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { + IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { throw new UnsupportedOperationException("Unsupported for repository of type: " + TYPE); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index ce1120a02373..873bce2a50f5 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -105,8 
+105,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; -import java.util.function.BooleanSupplier; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -608,62 +606,48 @@ public abstract class CcrIntegTestCase extends ESTestCase { * @param numDocs number of documents to wait for * @param indexer a {@link org.elasticsearch.test.BackgroundIndexer}. Will be first checked for documents indexed. * This saves on unneeded searches. - * @return the actual number of docs seen. */ - public long waitForDocs(final long numDocs, final BackgroundIndexer indexer) throws InterruptedException { + public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) throws Exception { // indexing threads can wait for up to ~1m before retrying when they first try to index into a shard which is not STARTED. - return waitForDocs(numDocs, 90, TimeUnit.SECONDS, indexer); - } + final long maxWaitTimeMs = Math.max(90 * 1000, 200 * numDocs); - /** - * Waits until at least a give number of document is visible for searchers - * - * @param numDocs number of documents to wait for - * @param maxWaitTime if not progress have been made during this time, fail the test - * @param maxWaitTimeUnit the unit in which maxWaitTime is specified - * @param indexer Will be first checked for documents indexed. - * This saves on unneeded searches. - * @return the actual number of docs seen. - */ - public long waitForDocs(final long numDocs, int maxWaitTime, TimeUnit maxWaitTimeUnit, final BackgroundIndexer indexer) - throws InterruptedException { - final AtomicLong lastKnownCount = new AtomicLong(-1); - long lastStartCount = -1; - BooleanSupplier testDocs = () -> { - lastKnownCount.set(indexer.totalIndexedDocs()); - if (lastKnownCount.get() >= numDocs) { - try { - long count = indexer.getClient().prepareSearch() - .setTrackTotalHits(true) - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits().getTotalHits().value; + assertBusy( + () -> { + long lastKnownCount = indexer.totalIndexedDocs(); - if (count == lastKnownCount.get()) { - // no progress - try to refresh for the next time - indexer.getClient().admin().indices().prepareRefresh().get(); + if (lastKnownCount >= numDocs) { + try { + long count = indexer.getClient().prepareSearch() + .setTrackTotalHits(true) + .setSize(0) + .setQuery(QueryBuilders.matchAllQuery()) + .get() + .getHits().getTotalHits().value; + + if (count == lastKnownCount) { + // no progress - try to refresh for the next time + indexer.getClient().admin().indices().prepareRefresh().get(); + } + lastKnownCount = count; + } catch (Exception e) { // count now acts like search and barfs if all shards failed... + logger.debug("failed to execute count", e); + throw e; + } - lastKnownCount.set(count); - } catch (Exception e) { // count now acts like search and barfs if all shards failed... - logger.debug("failed to executed count", e); - return false; - } - logger.debug("[{}] docs visible for search. waiting for [{}]", lastKnownCount.get(), numDocs); - } else { - logger.debug("[{}] docs indexed. 
waiting for [{}]", lastKnownCount.get(), numDocs); - } - return lastKnownCount.get() >= numDocs; - }; - while (!awaitBusy(testDocs, maxWaitTime, maxWaitTimeUnit)) { - if (lastStartCount == lastKnownCount.get()) { - // we didn't make any progress - fail("failed to reach " + numDocs + "docs"); - } - lastStartCount = lastKnownCount.get(); - } - return lastKnownCount.get(); + if (logger.isDebugEnabled()) { + if (lastKnownCount < numDocs) { + logger.debug("[{}] docs indexed. waiting for [{}]", lastKnownCount, numDocs); + } else { + logger.debug("[{}] docs visible for search (needed [{}])", lastKnownCount, numDocs); + } + } + + assertThat(lastKnownCount, greaterThanOrEqualTo(numDocs)); + }, + maxWaitTimeMs, + TimeUnit.MILLISECONDS + ); } protected ActionListener waitForRestore( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java index 29c831438f90..1d8230348326 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.protocol.xpack.graph; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; @@ -24,7 +25,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Iterator; import java.util.List; /** @@ -37,7 +37,6 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest"; private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); - private String[] types = Strings.EMPTY_ARRAY; private String routing; private TimeValue timeout; @@ -96,37 +95,15 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest return this; } - /** - * The document types to execute the explore against. Defaults to be executed against - * all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public String[] types() { - return this.types; - } - - /** - * The document types to execute the explore request against. Defaults to be executed against - * all types. - * - * @deprecated Types are in the process of being removed. Instead of using a type, prefer to - * filter on a field on the document. - */ - @Deprecated - public GraphExploreRequest types(String... 
types) { - this.types = types; - return this; - } - public GraphExploreRequest(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); indicesOptions = IndicesOptions.readIndicesOptions(in); - types = in.readStringArray(); + if (in.getVersion().before(Version.V_8_0_0)) { + String[] types = in.readStringArray(); + assert types.length == 0; + } routing = in.readOptionalString(); timeout = in.readOptionalTimeValue(); sampleSize = in.readInt(); @@ -169,7 +146,7 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest * operations involved in each hop are limited to the remaining time * available but can still overrun due to the nature of their "best efforts" * timeout support. When a timeout occurs partial results are returned. - * + * * @param timeout * a {@link TimeValue} object which determines the maximum length * of time to spend exploring @@ -192,7 +169,9 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest super.writeTo(out); out.writeStringArray(indices); indicesOptions.writeIndicesOptions(out); - out.writeStringArray(types); + if (out.getVersion().before(Version.V_8_0_0)) { + out.writeStringArray(Strings.EMPTY_ARRAY); + } out.writeOptionalString(routing); out.writeOptionalTimeValue(timeout); @@ -203,15 +182,14 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest out.writeBoolean(useSignificance); out.writeBoolean(returnDetailedInfo); out.writeInt(hops.size()); - for (Iterator iterator = hops.iterator(); iterator.hasNext();) { - Hop hop = iterator.next(); + for (Hop hop : hops) { hop.writeTo(out); } } @Override public String toString() { - return "graph explore [" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "]"; + return "graph explore [" + Arrays.toString(indices) + "]"; } /** @@ -227,7 +205,7 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest * better with smaller samples as there are less look-ups required for * background frequencies of terms found in the documents *
    - * + * * @param maxNumberOfDocsPerHop * shard-level sample size in documents */ @@ -268,7 +246,7 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest * default value is true which means terms are selected based on * significance (see the {@link SignificantTerms} aggregation) rather than * popularity (using the {@link TermsAggregator}). - * + * * @param value * true if the significant_terms algorithm should be used. */ @@ -283,7 +261,7 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest /** * Return detailed information about vertex frequencies as part of JSON * results - defaults to false - * + * * @param value * true if detailed information is required in JSON responses */ @@ -299,7 +277,7 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest * Add a stage in the graph exploration. Each hop represents a stage of * querying elasticsearch to identify terms which can then be connnected to * other terms in a subsequent hop. - * + * * @param guidingQuery * optional choice of query which influences which documents are * considered in this stage @@ -364,7 +342,7 @@ public class GraphExploreRequest extends ActionRequest implements IndicesRequest @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - + builder.startObject("controls"); { if (sampleSize != SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshotRepository.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshotRepository.java index 280e4a434457..df422aec73cd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshotRepository.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/SourceOnlySnapshotRepository.java @@ -133,7 +133,7 @@ public final class SourceOnlySnapshotRepository extends FilterRepository { @Override public void snapshotShard(Store store, MapperService mapperService, SnapshotId snapshotId, IndexId indexId, - IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { + IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus, ActionListener listener) { if (mapperService.documentMapper() != null // if there is no mapping this is null && mapperService.documentMapper().sourceMapper().isComplete() == false) { listener.onFailure( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 9f403953eb17..19451e5833e9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -138,6 +138,12 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.P import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.Recall; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.ScoreByThresholdResult; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification.SoftClassificationMetric; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.Tree; +import 
org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncoding; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.TargetMeanEncoding; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; import org.elasticsearch.xpack.core.rollup.RollupFeatureSetUsage; @@ -193,8 +199,8 @@ import org.elasticsearch.xpack.core.ssl.action.GetCertificateInfoAction; import org.elasticsearch.xpack.core.transform.TransformFeatureSetUsage; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; @@ -390,8 +396,8 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl StartTransformAction.INSTANCE, StopTransformAction.INSTANCE, DeleteTransformAction.INSTANCE, - GetTransformsAction.INSTANCE, - GetTransformsStatsAction.INSTANCE, + GetTransformAction.INSTANCE, + GetTransformStatsAction.INSTANCE, PreviewTransformAction.INSTANCE ); } @@ -437,6 +443,12 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl new NamedWriteableRegistry.Entry(EvaluationMetricResult.class, ScoreByThresholdResult.NAME, ScoreByThresholdResult::new), new NamedWriteableRegistry.Entry(EvaluationMetricResult.class, ConfusionMatrix.NAME.getPreferredName(), ConfusionMatrix.Result::new), + // ML - Inference preprocessing + new NamedWriteableRegistry.Entry(PreProcessor.class, FrequencyEncoding.NAME.getPreferredName(), FrequencyEncoding::new), + new NamedWriteableRegistry.Entry(PreProcessor.class, OneHotEncoding.NAME.getPreferredName(), OneHotEncoding::new), + new NamedWriteableRegistry.Entry(PreProcessor.class, TargetMeanEncoding.NAME.getPreferredName(), TargetMeanEncoding::new), + // ML - Inference models + new NamedWriteableRegistry.Entry(TrainedModel.class, Tree.NAME.getPreferredName(), Tree::new), // monitoring new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.MONITORING, MonitoringFeatureSetUsage::new), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java index 37456f234648..38e02be2656b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; /** * Creates a new {@link GraphExploreRequestBuilder} - * + * * @see GraphExploreRequest */ public class GraphExploreRequestBuilder extends ActionRequestBuilder { @@ -106,16 +106,7 @@ public class GraphExploreRequestBuilder 
extends ActionRequestBuilder Very large values (many thousands) are not recommended with loosely defined queries (fuzzy queries or + * significant connections between terms. + *
    Very large values (many thousands) are not recommended with loosely defined queries (fuzzy queries or * those with many OR clauses). * This is because any useful signals in the best documents are diluted with irrelevant noise from low-quality matches. - * Performance is also typically better with smaller samples as there are less look-ups required for background frequencies - * of terms found in the documents + * Performance is also typically better with smaller samples as there are less look-ups required for background frequencies + * of terms found in the documents *
    - * + * * @param maxNumberOfDocsPerHop the shard-level sample size in documents */ public GraphExploreRequestBuilder sampleSize(int maxNumberOfDocsPerHop) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParser.java new file mode 100644 index 000000000000..05b362d733e9 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParser.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ilm; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_ORIGINATION_DATE; +import static org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE; + +public class IndexLifecycleOriginationDateParser { + + private static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("yyyy.MM.dd"); + private static final String INDEX_NAME_REGEX = "^.*-(\\d{4}.\\d{2}.\\d{2})(-[\\d]+)?$"; + private static final Pattern INDEX_NAME_PATTERN = Pattern.compile(INDEX_NAME_REGEX); + + /** + * Determines if the origination date needs to be parsed from the index name. + */ + public static boolean shouldParseIndexName(Settings indexSettings) { + return indexSettings.getAsLong(LIFECYCLE_ORIGINATION_DATE, -1L) == -1L && + indexSettings.getAsBoolean(LIFECYCLE_PARSE_ORIGINATION_DATE, false); + } + + /** + * Parses the index according to the supported format and extracts the origination date. 
If the index does not match the expected + * format or the date in the index name doesn't match the `yyyy.MM.dd` format it throws an {@link IllegalArgumentException} + */ + public static long parseIndexNameAndExtractDate(String indexName) { + Matcher matcher = INDEX_NAME_PATTERN.matcher(indexName); + if (matcher.matches()) { + String dateAsString = matcher.group(1); + try { + return DATE_FORMATTER.parseMillis(dateAsString); + } catch (ElasticsearchParseException | IllegalArgumentException e) { + throw new IllegalArgumentException("index name [" + indexName + "] contains date [" + dateAsString + "] which " + + "couldn't be parsed using the 'yyyy.MM.dd' format", e); + } + } + + throw new IllegalArgumentException("index name [" + indexName + "] does not match pattern '" + INDEX_NAME_REGEX + "'"); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java index c0ba7ba54114..3e7ad7a6a07d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java @@ -10,8 +10,11 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.parseIndexNameAndExtractDate; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.shouldParseIndexName; import static org.elasticsearch.xpack.core.ilm.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; /** @@ -34,19 +37,34 @@ public final class InitializePolicyContextStep extends ClusterStateActionStep { // Index must have been since deleted, ignore it return clusterState; } + LifecycleExecutionState lifecycleState = LifecycleExecutionState .fromIndexMetadata(indexMetaData); + if (lifecycleState.getLifecycleDate() != null) { return clusterState; } + IndexMetaData.Builder indexMetadataBuilder = IndexMetaData.builder(indexMetaData); + if (shouldParseIndexName(indexMetaData.getSettings())) { + long parsedOriginationDate = parseIndexNameAndExtractDate(index.getName()); + indexMetadataBuilder.settingsVersion(indexMetaData.getSettingsVersion() + 1) + .settings(Settings.builder() + .put(indexMetaData.getSettings()) + .put(LifecycleSettings.LIFECYCLE_ORIGINATION_DATE, parsedOriginationDate) + .build() + ); + } + ClusterState.Builder newClusterStateBuilder = ClusterState.builder(clusterState); LifecycleExecutionState.Builder newCustomData = LifecycleExecutionState.builder(lifecycleState); newCustomData.setIndexCreationDate(indexMetaData.getCreationDate()); - newClusterStateBuilder.metaData(MetaData.builder(clusterState.getMetaData()).put(IndexMetaData - .builder(indexMetaData) - .putCustom(ILM_CUSTOM_METADATA_KEY, newCustomData.build().asMap()))); + indexMetadataBuilder.putCustom(ILM_CUSTOM_METADATA_KEY, newCustomData.build().asMap()); + + newClusterStateBuilder.metaData( + MetaData.builder(clusterState.getMetaData()).put(indexMetadataBuilder) + ); return newClusterStateBuilder.build(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java 
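The two helpers above are designed to be used together: shouldParseIndexName answers whether the origination date is still unset while the new parse flag is on, and parseIndexNameAndExtractDate then recovers the date embedded in the index name. A minimal sketch of that hand-off, with an illustrative index name and settings (not taken from the change itself):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser;

    public class OriginationDateParsingSketch {
        public static void main(String[] args) {
            // index.lifecycle.origination_date is left unset (-1), so only the flag matters.
            Settings indexSettings = Settings.builder()
                .put("index.lifecycle.parse_origination_date", true)
                .build();

            if (IndexLifecycleOriginationDateParser.shouldParseIndexName(indexSettings)) {
                // "my-index-2019.09.04-0001" matches ^.*-(\d{4}.\d{2}.\d{2})(-[\d]+)?$;
                // the trailing rollover suffix "-0001" is captured but ignored.
                long originationDate = IndexLifecycleOriginationDateParser
                    .parseIndexNameAndExtractDate("my-index-2019.09.04-0001");
                System.out.println(originationDate); // epoch millis for 2019.09.04
            }
        }
    }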
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java index 4aa98ad8296f..6fea7cf87737 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecycleSettings.java @@ -18,6 +18,7 @@ public class LifecycleSettings { public static final String LIFECYCLE_NAME = "index.lifecycle.name"; public static final String LIFECYCLE_INDEXING_COMPLETE = "index.lifecycle.indexing_complete"; public static final String LIFECYCLE_ORIGINATION_DATE = "index.lifecycle.origination_date"; + public static final String LIFECYCLE_PARSE_ORIGINATION_DATE = "index.lifecycle.parse_origination_date"; public static final String SLM_HISTORY_INDEX_ENABLED = "slm.history_index_enabled"; public static final String SLM_RETENTION_SCHEDULE = "slm.retention_schedule"; @@ -32,6 +33,8 @@ public class LifecycleSettings { Setting.Property.Dynamic, Setting.Property.IndexScope); public static final Setting<Long> LIFECYCLE_ORIGINATION_DATE_SETTING = Setting.longSetting(LIFECYCLE_ORIGINATION_DATE, -1, -1, Setting.Property.Dynamic, Setting.Property.IndexScope); + public static final Setting<Boolean> LIFECYCLE_PARSE_ORIGINATION_DATE_SETTING = Setting.boolSetting(LIFECYCLE_PARSE_ORIGINATION_DATE, + false, Setting.Property.Dynamic, Setting.Property.IndexScope); public static final Setting<Boolean> SLM_HISTORY_INDEX_ENABLED_SETTING = Setting.boolSetting(SLM_HISTORY_INDEX_ENABLED, true, Setting.Property.NodeScope);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java index b3b2a3b6666a..7f8486223928 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EvaluateDataFrameAction.java @@ -105,28 +105,31 @@ public class EvaluateDataFrameAction extends ActionType<EvaluateDataFrameAction.Response> { - public final void setIndices(List<String> indices) { + public final Request setIndices(List<String> indices) { ExceptionsHelper.requireNonNull(indices, INDEX); if (indices.isEmpty()) { throw ExceptionsHelper.badRequestException("At least one index must be specified"); } this.indices = indices.toArray(new String[indices.size()]); + return this; } public QueryBuilder getParsedQuery() { return Optional.ofNullable(queryProvider).orElseGet(QueryProvider::defaultQuery).getParsedQuery(); } - public final void setQueryProvider(QueryProvider queryProvider) { + public final Request setQueryProvider(QueryProvider queryProvider) { this.queryProvider = queryProvider; + return this; } public Evaluation getEvaluation() { return evaluation; } - public final void setEvaluation(Evaluation evaluation) { + public final Request setEvaluation(Evaluation evaluation) { this.evaluation = ExceptionsHelper.requireNonNull(evaluation, EVALUATION); + return this; } @Override @@ -203,6 +206,14 @@ public class EvaluateDataFrameAction extends ActionType<EvaluateDataFrameAction.Response> { + public List<EvaluationMetricResult> getMetrics() { + return metrics; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(evaluationName); @@ -214,7 +225,7 @@ public class EvaluateDataFrameAction extends ActionType<EvaluateDataFrameAction.Response> {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java + List<? extends EvaluationMetric> getMetrics(); /** - * Computes the evaluation result - * @param searchResponse The search response required to compute the result - * @param listener A listener of the results + * Builds the search required to collect data to compute the evaluation result + * @param
userProvidedQueryBuilder User-provided query that must be respected when collecting data */ - void evaluate(SearchResponse searchResponse, ActionListener<List<EvaluationMetricResult>> listener); + SearchSourceBuilder buildSearch(QueryBuilder userProvidedQueryBuilder); + + /** + * Builds the search that verifies existence of the required fields and applies the user-provided query + * @param requiredFields fields that must exist + * @param userProvidedQueryBuilder user-provided query + */ + default SearchSourceBuilder newSearchSourceBuilder(List<String> requiredFields, QueryBuilder userProvidedQueryBuilder) { + BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); + for (String requiredField : requiredFields) { + boolQuery.filter(QueryBuilders.existsQuery(requiredField)); + } + boolQuery.filter(userProvidedQueryBuilder); + return new SearchSourceBuilder().size(0).query(boolQuery); + } + + /** + * Processes {@link SearchResponse} from the search action + * @param searchResponse response from the search action + */ + void process(SearchResponse searchResponse); + + /** + * @return true iff all the metrics have their results computed + */ + default boolean hasAllResults() { + return getMetrics().stream().map(EvaluationMetric::getResult).allMatch(Optional::isPresent); + } + + /** + * Returns the list of evaluation results + * @return list of evaluation results + */ + default List<EvaluationMetricResult> getResults() { + return getMetrics().stream() + .map(EvaluationMetric::getResult) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); + } }
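The reshaped Evaluation contract turns what used to be a single evaluate callback into an explicit loop: build a search, feed the response back via process, and repeat until hasAllResults is true (a metric whose result is already computed contributes no further aggregations). A rough sketch of a caller driving that loop; the executeSearch function stands in for the transport-level search call and is not part of this change:

    import java.util.List;
    import java.util.function.Function;

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.search.builder.SearchSourceBuilder;
    import org.elasticsearch.xpack.core.ml.dataframe.evaluation.Evaluation;
    import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult;

    public class EvaluationDriverSketch {
        // Drives an Evaluation (e.g. a Regression or BinarySoftClassification instance) to completion.
        static List<EvaluationMetricResult> evaluate(Evaluation evaluation,
                                                     QueryBuilder userQuery,
                                                     Function<SearchSourceBuilder, SearchResponse> executeSearch) {
            while (evaluation.hasAllResults() == false) {
                SearchSourceBuilder searchSource = evaluation.buildSearch(userQuery);
                evaluation.process(executeSearch.apply(searchSource));
            }
            return evaluation.getResults();
        }
    }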
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java new file mode 100644 index 000000000000..54934b64652c --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetric.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.dataframe.evaluation; + +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.xcontent.ToXContentObject; + +import java.util.Optional; + +/** + * {@link EvaluationMetric} represents a metric to evaluate. + */ +public interface EvaluationMetric extends ToXContentObject, NamedWriteable { + + /** + * Returns the name of the metric (which may differ from the writeable name) + */ + String getName(); + + /** + * Gets the evaluation result for this metric. + * @return {@code Optional.empty()} if the result is not available yet, {@code Optional.of(result)} otherwise + */ + Optional<EvaluationMetricResult> getResult(); +}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetricResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetricResult.java index 36b8adf9d4ea..06c7719a401a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetricResult.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationMetricResult.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; public interface EvaluationMetricResult extends ToXContentObject, NamedWriteable { /** - * Returns the name of the metric + * Returns the name of the metric (which may differ from the writeable name) */ - String getName(); + String getMetricName(); }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java index e48cb46b5c0a..0d652f511807 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredError.java @@ -20,10 +20,10 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; import java.io.IOException; import java.text.MessageFormat; -import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Objects; +import java.util.Optional; /** * Calculates the mean squared error between two known numerical fields. @@ -48,28 +48,34 @@ public class MeanSquaredError implements RegressionMetric { return PARSER.apply(parser, null); } - public MeanSquaredError(StreamInput in) { + private EvaluationMetricResult result; - } + public MeanSquaredError(StreamInput in) {} - public MeanSquaredError() { - - } + public MeanSquaredError() {} @Override - public String getMetricName() { + public String getName() { return NAME.getPreferredName(); } @Override public List<AggregationBuilder> aggs(String actualField, String predictedField) { - return Collections.singletonList(AggregationBuilders.avg(AGG_NAME).script(new Script(buildScript(actualField, predictedField)))); + if (result != null) { + return List.of(); + } + return List.of(AggregationBuilders.avg(AGG_NAME).script(new Script(buildScript(actualField, predictedField)))); } @Override - public EvaluationMetricResult evaluate(Aggregations aggs) { + public void process(Aggregations aggs) { NumericMetricsAggregation.SingleValue value = aggs.get(AGG_NAME); - return value == null ? new Result(0.0) : new Result(value.value()); + result = value == null ?
new Result(0.0) : new Result(value.value()); + } + + @Override + public Optional getResult() { + return Optional.ofNullable(result); } @Override @@ -121,7 +127,7 @@ public class MeanSquaredError implements RegressionMetric { } @Override - public String getName() { + public String getMetricName() { return NAME.getPreferredName(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java index a55306561833..e2794d548433 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RSquared.java @@ -22,10 +22,10 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResu import java.io.IOException; import java.text.MessageFormat; -import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Objects; +import java.util.Optional; /** * Calculates R-Squared between two known numerical fields. @@ -53,36 +53,42 @@ public class RSquared implements RegressionMetric { return PARSER.apply(parser, null); } - public RSquared(StreamInput in) { + private EvaluationMetricResult result; - } + public RSquared(StreamInput in) {} - public RSquared() { - - } + public RSquared() {} @Override - public String getMetricName() { + public String getName() { return NAME.getPreferredName(); } @Override public List aggs(String actualField, String predictedField) { - return Arrays.asList( + if (result != null) { + return List.of(); + } + return List.of( AggregationBuilders.sum(SS_RES).script(new Script(buildScript(actualField, predictedField))), AggregationBuilders.extendedStats(ExtendedStatsAggregationBuilder.NAME + "_actual").field(actualField)); } @Override - public EvaluationMetricResult evaluate(Aggregations aggs) { + public void process(Aggregations aggs) { NumericMetricsAggregation.SingleValue residualSumOfSquares = aggs.get(SS_RES); ExtendedStats extendedStats = aggs.get(ExtendedStatsAggregationBuilder.NAME + "_actual"); // extendedStats.getVariance() is the statistical sumOfSquares divided by count - return residualSumOfSquares == null || extendedStats == null || extendedStats.getCount() == 0 ? + result = residualSumOfSquares == null || extendedStats == null || extendedStats.getCount() == 0 ? 
new Result(0.0) : new Result(1 - (residualSumOfSquares.value() / (extendedStats.getVariance() * extendedStats.getCount()))); } + @Override + public Optional getResult() { + return Optional.ofNullable(result); + } + @Override public String getWriteableName() { return NAME.getPreferredName(); @@ -132,7 +138,7 @@ public class RSquared implements RegressionMetric { } @Override - public String getName() { + public String getMetricName() { return NAME.getPreferredName(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java index bb2540a8691b..c5f1a7a2fde2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/Regression.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.dataframe.evaluation.regression; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -14,17 +13,15 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -86,19 +83,16 @@ public class Regression implements Evaluation { } private static List initMetrics(@Nullable List parsedMetrics) { - List metrics = parsedMetrics == null ? defaultMetrics() : parsedMetrics; + List metrics = parsedMetrics == null ? 
defaultMetrics() : new ArrayList<>(parsedMetrics); if (metrics.isEmpty()) { throw ExceptionsHelper.badRequestException("[{}] must have one or more metrics", NAME.getPreferredName()); } - Collections.sort(metrics, Comparator.comparing(RegressionMetric::getMetricName)); + Collections.sort(metrics, Comparator.comparing(RegressionMetric::getName)); return metrics; } private static List defaultMetrics() { - List defaultMetrics = new ArrayList<>(2); - defaultMetrics.add(new MeanSquaredError()); - defaultMetrics.add(new RSquared()); - return defaultMetrics; + return Arrays.asList(new MeanSquaredError(), new RSquared()); } @Override @@ -107,12 +101,14 @@ public class Regression implements Evaluation { } @Override - public SearchSourceBuilder buildSearch(QueryBuilder queryBuilder) { - BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() - .filter(QueryBuilders.existsQuery(actualField)) - .filter(QueryBuilders.existsQuery(predictedField)) - .filter(queryBuilder); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0).query(boolQuery); + public List getMetrics() { + return metrics; + } + + @Override + public SearchSourceBuilder buildSearch(QueryBuilder userProvidedQueryBuilder) { + ExceptionsHelper.requireNonNull(userProvidedQueryBuilder, "userProvidedQueryBuilder"); + SearchSourceBuilder searchSourceBuilder = newSearchSourceBuilder(List.of(actualField, predictedField), userProvidedQueryBuilder); for (RegressionMetric metric : metrics) { List aggs = metric.aggs(actualField, predictedField); aggs.forEach(searchSourceBuilder::aggregation); @@ -121,18 +117,14 @@ public class Regression implements Evaluation { } @Override - public void evaluate(SearchResponse searchResponse, ActionListener> listener) { - List results = new ArrayList<>(metrics.size()); + public void process(SearchResponse searchResponse) { + ExceptionsHelper.requireNonNull(searchResponse, "searchResponse"); if (searchResponse.getHits().getTotalHits().value == 0) { - listener.onFailure(ExceptionsHelper.badRequestException("No documents found containing both [{}, {}] fields", - actualField, - predictedField)); - return; + throw ExceptionsHelper.badRequestException("No documents found containing both [{}, {}] fields", actualField, predictedField); } for (RegressionMetric metric : metrics) { - results.add(metric.evaluate(searchResponse.getAggregations())); + metric.process(searchResponse.getAggregations()); } - listener.onResponse(results); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionMetric.java index 1da48e2f305e..08dfbfab4aa7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionMetric.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionMetric.java @@ -5,20 +5,14 @@ */ package org.elasticsearch.xpack.core.ml.dataframe.evaluation.regression; -import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; +import 
org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetric; import java.util.List; -public interface RegressionMetric extends ToXContentObject, NamedWriteable { - - /** - * Returns the name of the metric (which may differ to the writeable name) - */ - String getMetricName(); +public interface RegressionMetric extends EvaluationMetric { /** * Builds the aggregation that collect required data to compute the metric @@ -29,9 +23,8 @@ public interface RegressionMetric extends ToXContentObject, NamedWriteable { List aggs(String actualField, String predictedField); /** - * Calculates the metric result - * @param aggs the aggregations - * @return the metric result + * Processes given aggregations as a step towards computing result + * @param aggs aggregations from {@link SearchResponse} */ - EvaluationMetricResult evaluate(Aggregations aggs); + void process(Aggregations aggs); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AbstractConfusionMatrixMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AbstractConfusionMatrixMetric.java index facdcceea194..9ce186c524aa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AbstractConfusionMatrixMetric.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AbstractConfusionMatrixMetric.java @@ -13,27 +13,30 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Optional; abstract class AbstractConfusionMatrixMetric implements SoftClassificationMetric { public static final ParseField AT = new ParseField("at"); protected final double[] thresholds; + private EvaluationMetricResult result; protected AbstractConfusionMatrixMetric(double[] thresholds) { this.thresholds = ExceptionsHelper.requireNonNull(thresholds, AT); if (thresholds.length == 0) { - throw ExceptionsHelper.badRequestException("[" + getMetricName() + "." + AT.getPreferredName() - + "] must have at least one value"); + throw ExceptionsHelper.badRequestException("[" + getName() + "." + AT.getPreferredName() + "] must have at least one value"); } for (double threshold : thresholds) { if (threshold < 0 || threshold > 1.0) { - throw ExceptionsHelper.badRequestException("[" + getMetricName() + "." + AT.getPreferredName() + throw ExceptionsHelper.badRequestException("[" + getName() + "." 
+ AT.getPreferredName() + "] values must be in [0.0, 1.0]"); } } @@ -58,6 +61,9 @@ abstract class AbstractConfusionMatrixMetric implements SoftClassificationMetric @Override public final List aggs(String actualField, List classInfos) { + if (result != null) { + return List.of(); + } List aggs = new ArrayList<>(); for (double threshold : thresholds) { aggs.addAll(aggsAt(actualField, classInfos, threshold)); @@ -65,14 +71,26 @@ abstract class AbstractConfusionMatrixMetric implements SoftClassificationMetric return aggs; } + @Override + public void process(ClassInfo classInfo, Aggregations aggs) { + result = evaluate(classInfo, aggs); + } + + @Override + public Optional getResult() { + return Optional.ofNullable(result); + } + protected abstract List aggsAt(String labelField, List classInfos, double threshold); + protected abstract EvaluationMetricResult evaluate(ClassInfo classInfo, Aggregations aggs); + protected enum Condition { TP, FP, TN, FN; } protected String aggName(ClassInfo classInfo, double threshold, Condition condition) { - return getMetricName() + "_" + classInfo.getName() + "_at_" + threshold + "_" + condition.name(); + return getName() + "_" + classInfo.getName() + "_at_" + threshold + "_" + condition.name(); } protected AggregationBuilder buildAgg(ClassInfo classInfo, double threshold, Condition condition) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AucRoc.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AucRoc.java index 228dac00bfb6..188713b03712 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AucRoc.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/AucRoc.java @@ -30,6 +30,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.stream.IntStream; /** @@ -70,6 +71,7 @@ public class AucRoc implements SoftClassificationMetric { } private final boolean includeCurve; + private EvaluationMetricResult result; public AucRoc(Boolean includeCurve) { this.includeCurve = includeCurve == null ? 
false : includeCurve; @@ -98,7 +100,7 @@ public class AucRoc implements SoftClassificationMetric { } @Override - public String getMetricName() { + public String getName() { return NAME.getPreferredName(); } @@ -117,6 +119,9 @@ public class AucRoc implements SoftClassificationMetric { @Override public List aggs(String actualField, List classInfos) { + if (result != null) { + return List.of(); + } double[] percentiles = IntStream.range(1, 100).mapToDouble(v -> (double) v).toArray(); List aggs = new ArrayList<>(); for (ClassInfo classInfo : classInfos) { @@ -134,22 +139,31 @@ public class AucRoc implements SoftClassificationMetric { return aggs; } - private String evaluatedLabelAggName(ClassInfo classInfo) { - return getMetricName() + "_" + classInfo.getName(); - } - - private String restLabelsAggName(ClassInfo classInfo) { - return getMetricName() + "_non_" + classInfo.getName(); + @Override + public void process(ClassInfo classInfo, Aggregations aggs) { + result = evaluate(classInfo, aggs); } @Override - public EvaluationMetricResult evaluate(ClassInfo classInfo, Aggregations aggs) { + public Optional getResult() { + return Optional.ofNullable(result); + } + + private String evaluatedLabelAggName(ClassInfo classInfo) { + return getName() + "_" + classInfo.getName(); + } + + private String restLabelsAggName(ClassInfo classInfo) { + return getName() + "_non_" + classInfo.getName(); + } + + private EvaluationMetricResult evaluate(ClassInfo classInfo, Aggregations aggs) { Filter classAgg = aggs.get(evaluatedLabelAggName(classInfo)); Filter restAgg = aggs.get(restLabelsAggName(classInfo)); double[] tpPercentiles = percentilesArray(classAgg.getAggregations().get(PERCENTILES), - "[" + getMetricName() + "] requires at least one actual_field to have the value [" + classInfo.getName() + "]"); + "[" + getName() + "] requires at least one actual_field to have the value [" + classInfo.getName() + "]"); double[] fpPercentiles = percentilesArray(restAgg.getAggregations().get(PERCENTILES), - "[" + getMetricName() + "] requires at least one actual_field to have a different value than [" + classInfo.getName() + "]"); + "[" + getName() + "] requires at least one actual_field to have a different value than [" + classInfo.getName() + "]"); List aucRocCurve = buildAucRocCurve(tpPercentiles, fpPercentiles); double aucRocScore = calculateAucScore(aucRocCurve); return new Result(aucRocScore, includeCurve ? 
aucRocCurve : Collections.emptyList()); @@ -326,7 +340,7 @@ public class AucRoc implements SoftClassificationMetric { } @Override - public String getName() { + public String getMetricName() { return NAME.getPreferredName(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassification.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassification.java index 20731eba5e83..30858107af08 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassification.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassification.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -14,18 +13,14 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; @@ -87,17 +82,16 @@ public class BinarySoftClassification implements Evaluation { if (metrics.isEmpty()) { throw ExceptionsHelper.badRequestException("[{}] must have one or more metrics", NAME.getPreferredName()); } - Collections.sort(metrics, Comparator.comparing(SoftClassificationMetric::getMetricName)); + Collections.sort(metrics, Comparator.comparing(SoftClassificationMetric::getName)); return metrics; } private static List defaultMetrics() { - List defaultMetrics = new ArrayList<>(4); - defaultMetrics.add(new AucRoc(false)); - defaultMetrics.add(new Precision(Arrays.asList(0.25, 0.5, 0.75))); - defaultMetrics.add(new Recall(Arrays.asList(0.25, 0.5, 0.75))); - defaultMetrics.add(new ConfusionMatrix(Arrays.asList(0.25, 0.5, 0.75))); - return defaultMetrics; + return Arrays.asList( + new AucRoc(false), + new Precision(Arrays.asList(0.25, 0.5, 0.75)), + new Recall(Arrays.asList(0.25, 0.5, 0.75)), + new ConfusionMatrix(Arrays.asList(0.25, 0.5, 0.75))); } public BinarySoftClassification(StreamInput in) throws IOException { @@ -126,7 +120,7 @@ public class BinarySoftClassification implements Evaluation { builder.startObject(METRICS.getPreferredName()); for (SoftClassificationMetric metric : metrics) { - builder.field(metric.getMetricName(), metric); + builder.field(metric.getName(), metric); } builder.endObject(); @@ -155,34 +149,34 @@ public class BinarySoftClassification implements Evaluation { } @Override - public SearchSourceBuilder 
buildSearch(QueryBuilder queryBuilder) { - BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() - .filter(QueryBuilders.existsQuery(actualField)) - .filter(QueryBuilders.existsQuery(predictedProbabilityField)) - .filter(queryBuilder); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0).query(boolQuery); + public List getMetrics() { + return metrics; + } + + @Override + public SearchSourceBuilder buildSearch(QueryBuilder userProvidedQueryBuilder) { + ExceptionsHelper.requireNonNull(userProvidedQueryBuilder, "userProvidedQueryBuilder"); + SearchSourceBuilder searchSourceBuilder = + newSearchSourceBuilder(List.of(actualField, predictedProbabilityField), userProvidedQueryBuilder); + BinaryClassInfo binaryClassInfo = new BinaryClassInfo(); for (SoftClassificationMetric metric : metrics) { - List aggs = metric.aggs(actualField, Collections.singletonList(new BinaryClassInfo())); + List aggs = metric.aggs(actualField, Collections.singletonList(binaryClassInfo)); aggs.forEach(searchSourceBuilder::aggregation); } return searchSourceBuilder; } @Override - public void evaluate(SearchResponse searchResponse, ActionListener> listener) { + public void process(SearchResponse searchResponse) { + ExceptionsHelper.requireNonNull(searchResponse, "searchResponse"); if (searchResponse.getHits().getTotalHits().value == 0) { - listener.onFailure(ExceptionsHelper.badRequestException("No documents found containing both [{}, {}] fields", actualField, - predictedProbabilityField)); - return; + throw ExceptionsHelper.badRequestException( + "No documents found containing both [{}, {}] fields", actualField, predictedProbabilityField); } - - List results = new ArrayList<>(); - Aggregations aggs = searchResponse.getAggregations(); BinaryClassInfo binaryClassInfo = new BinaryClassInfo(); for (SoftClassificationMetric metric : metrics) { - results.add(metric.evaluate(binaryClassInfo, aggs)); + metric.process(binaryClassInfo, searchResponse.getAggregations()); } - listener.onResponse(results); } private class BinaryClassInfo implements SoftClassificationMetric.ClassInfo { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java index 54f245962d51..6fc05809245d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ConfusionMatrix.java @@ -50,7 +50,7 @@ public class ConfusionMatrix extends AbstractConfusionMatrixMetric { } @Override - public String getMetricName() { + public String getName() { return NAME.getPreferredName(); } @@ -132,7 +132,7 @@ public class ConfusionMatrix extends AbstractConfusionMatrixMetric { } @Override - public String getName() { + public String getMetricName() { return NAME.getPreferredName(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java index d38a52bb203e..a0fcda5f90c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Precision.java @@ -48,7 +48,7 @@ public class Precision extends AbstractConfusionMatrixMetric { } @Override - public String getMetricName() { + public String getName() { return NAME.getPreferredName(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Recall.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Recall.java index f7103aceedae..53b3f1a24a2f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Recall.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/Recall.java @@ -48,7 +48,7 @@ public class Recall extends AbstractConfusionMatrixMetric { } @Override - public String getMetricName() { + public String getName() { return NAME.getPreferredName(); } @@ -68,7 +68,7 @@ public class Recall extends AbstractConfusionMatrixMetric { @Override protected List aggsAt(String actualField, List classInfos, double threshold) { List aggs = new ArrayList<>(); - for (ClassInfo classInfo: classInfos) { + for (ClassInfo classInfo : classInfos) { aggs.add(buildAgg(classInfo, threshold, Condition.TP)); aggs.add(buildAgg(classInfo, threshold, Condition.FN)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ScoreByThresholdResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ScoreByThresholdResult.java index bd6b6e7db25a..0ad99a83cf25 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ScoreByThresholdResult.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/ScoreByThresholdResult.java @@ -40,7 +40,7 @@ public class ScoreByThresholdResult implements EvaluationMetricResult { } @Override - public String getName() { + public String getMetricName() { return name; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/SoftClassificationMetric.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/SoftClassificationMetric.java index dfb256e9b52f..a5b072632c22 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/SoftClassificationMetric.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/SoftClassificationMetric.java @@ -5,16 +5,15 @@ */ package org.elasticsearch.xpack.core.ml.dataframe.evaluation.softclassification; -import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; +import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetric; import java.util.List; -public interface SoftClassificationMetric extends ToXContentObject, NamedWriteable { +public 
interface SoftClassificationMetric extends EvaluationMetric { /** * The information of a specific class @@ -37,11 +36,6 @@ public interface SoftClassificationMetric extends ToXContentObject, NamedWriteab String getProbabilityField(); } - /** - * Returns the name of the metric (which may differ to the writeable name) - */ - String getMetricName(); - /** * Builds the aggregation that collect required data to compute the metric * @param actualField the field that stores the actual class @@ -51,10 +45,9 @@ public interface SoftClassificationMetric extends ToXContentObject, NamedWriteab List aggs(String actualField, List classInfos); /** - * Calculates the metric result for a given class + * Processes given aggregations as a step towards computing result * @param classInfo the class to calculate the metric for - * @param aggs the aggregations - * @return the metric result + * @param aggs aggregations from {@link SearchResponse} */ - EvaluationMetricResult evaluate(ClassInfo classInfo, Aggregations aggs); + void process(ClassInfo classInfo, Aggregations aggs); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java new file mode 100644 index 000000000000..7f14077a1504 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/MlInferenceNamedXContentProvider.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.plugins.spi.NamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedTrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedTrainedModel; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree.Tree; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncoding; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.LenientlyParsedPreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.StrictlyParsedPreProcessor; +import org.elasticsearch.xpack.core.ml.inference.preprocessing.TargetMeanEncoding; + +import java.util.ArrayList; +import java.util.List; + +public class MlInferenceNamedXContentProvider implements NamedXContentProvider { + + @Override + public List getNamedXContentParsers() { + List namedXContent = new ArrayList<>(); + + // PreProcessing Lenient + namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedPreProcessor.class, OneHotEncoding.NAME, + OneHotEncoding::fromXContentLenient)); + namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedPreProcessor.class, TargetMeanEncoding.NAME, + TargetMeanEncoding::fromXContentLenient)); + namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedPreProcessor.class, FrequencyEncoding.NAME, + 
FrequencyEncoding::fromXContentLenient)); + + // PreProcessing Strict + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedPreProcessor.class, OneHotEncoding.NAME, + OneHotEncoding::fromXContentStrict)); + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedPreProcessor.class, TargetMeanEncoding.NAME, + TargetMeanEncoding::fromXContentStrict)); + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedPreProcessor.class, FrequencyEncoding.NAME, + FrequencyEncoding::fromXContentStrict)); + + // Model Lenient + namedXContent.add(new NamedXContentRegistry.Entry(LenientlyParsedTrainedModel.class, Tree.NAME, Tree::fromXContentLenient)); + + // Model Strict + namedXContent.add(new NamedXContentRegistry.Entry(StrictlyParsedTrainedModel.class, Tree.NAME, Tree::fromXContentStrict)); + + return namedXContent; + } + + public List<NamedWriteableRegistry.Entry> getNamedWriteables() { + List<NamedWriteableRegistry.Entry> namedWriteables = new ArrayList<>(); + + // PreProcessing + namedWriteables.add(new NamedWriteableRegistry.Entry(PreProcessor.class, OneHotEncoding.NAME.getPreferredName(), + OneHotEncoding::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(PreProcessor.class, TargetMeanEncoding.NAME.getPreferredName(), + TargetMeanEncoding::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(PreProcessor.class, FrequencyEncoding.NAME.getPreferredName(), + FrequencyEncoding::new)); + + // Model + namedWriteables.add(new NamedWriteableRegistry.Entry(TrainedModel.class, Tree.NAME.getPreferredName(), Tree::new)); + + return namedWriteables; + } +}
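A quick sketch of what these entries enable: building a NamedXContentRegistry from the provider and parsing a strictly-validated preprocessor out of JSON. The payload below is illustrative, matching OneHotEncoding's field/hot_map schema, and the registry plumbing shown is the standard XContent machinery rather than anything introduced here:

    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;
    import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider;
    import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding;

    public class PreProcessorParsingSketch {
        public static void main(String[] args) throws Exception {
            NamedXContentRegistry registry =
                new NamedXContentRegistry(new MlInferenceNamedXContentProvider().getNamedXContentParsers());
            String json = "{\"field\":\"animal\",\"hot_map\":{\"cat\":\"animal_cat\",\"dog\":\"animal_dog\"}}";
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                    registry, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
                // Strict parsing rejects unknown fields in the payload.
                OneHotEncoding encoding = OneHotEncoding.fromXContentStrict(parser);
                System.out.println(encoding.getHotMap()); // {cat=animal_cat, dog=animal_dog}
            }
        }
    }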
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncoding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncoding.java new file mode 100644 index 000000000000..351c0f05960f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncoding.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + + +/** + * PreProcessor for frequency encoding a set of categorical values for a given field. + */ +public class FrequencyEncoding implements LenientlyParsedPreProcessor, StrictlyParsedPreProcessor { + + public static final ParseField NAME = new ParseField("frequency_encoding"); + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField FEATURE_NAME = new ParseField("feature_name"); + public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map"); + + public static final ConstructingObjectParser<FrequencyEncoding, Void> STRICT_PARSER = createParser(false); + public static final ConstructingObjectParser<FrequencyEncoding, Void> LENIENT_PARSER = createParser(true); + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser<FrequencyEncoding, Void> createParser(boolean lenient) { + ConstructingObjectParser<FrequencyEncoding, Void> parser = new ConstructingObjectParser<>( + NAME.getPreferredName(), + lenient, + a -> new FrequencyEncoding((String)a[0], (String)a[1], (Map<String, Double>)a[2])); + parser.declareString(ConstructingObjectParser.constructorArg(), FIELD); + parser.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); + parser.declareObject(ConstructingObjectParser.constructorArg(), + (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), + FREQUENCY_MAP); + return parser; + } + + public static FrequencyEncoding fromXContentStrict(XContentParser parser) { + return STRICT_PARSER.apply(parser, null); + } + + public static FrequencyEncoding fromXContentLenient(XContentParser parser) { + return LENIENT_PARSER.apply(parser, null); + } + + private final String field; + private final String featureName; + private final Map<String, Double> frequencyMap; + + public FrequencyEncoding(String field, String featureName, Map<String, Double> frequencyMap) { + this.field = ExceptionsHelper.requireNonNull(field, FIELD); + this.featureName = ExceptionsHelper.requireNonNull(featureName, FEATURE_NAME); + this.frequencyMap = Collections.unmodifiableMap(ExceptionsHelper.requireNonNull(frequencyMap, FREQUENCY_MAP)); + } + + public FrequencyEncoding(StreamInput in) throws IOException { + this.field = in.readString(); + this.featureName = in.readString(); + this.frequencyMap = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readDouble)); + } + + /** + * @return Field name on which to frequency encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: frequency for the frequency encoding + */ + public Map<String, Double> getFrequencyMap() { + return frequencyMap; + } + + /** + * @return The encoded feature name + */ + public String getFeatureName() { + return featureName; + } + + @Override + public String getName() { + return NAME.getPreferredName(); + } + + @Override + public void process(Map<String, Object> fields) { + String value = (String)fields.get(field); + if (value == null) { + return; + } + fields.put(featureName, frequencyMap.getOrDefault(value, 0.0)); + } + + @Override + public String getWriteableName() { + return NAME.getPreferredName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeString(featureName); + out.writeMap(frequencyMap, StreamOutput::writeString, StreamOutput::writeDouble); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(FEATURE_NAME.getPreferredName(), featureName); + builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null
|| getClass() != o.getClass()) return false; + FrequencyEncoding that = (FrequencyEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(featureName, that.featureName) + && Objects.equals(frequencyMap, that.frequencyMap); + } + + @Override + public int hashCode() { + return Objects.hash(field, featureName, frequencyMap); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/LenientlyParsedPreProcessor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/LenientlyParsedPreProcessor.java new file mode 100644 index 000000000000..e4e8957420de --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/LenientlyParsedPreProcessor.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +/** + * To be used in conjunction with a lenient parser. + */ +public interface LenientlyParsedPreProcessor extends PreProcessor { +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncoding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncoding.java new file mode 100644 index 000000000000..106cb1e26c1c --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncoding.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; + +/** + * PreProcessor for one hot encoding a set of categorical values for a given field. 
+ */ +public class OneHotEncoding implements LenientlyParsedPreProcessor, StrictlyParsedPreProcessor { + + public static final ParseField NAME = new ParseField("one_hot_encoding"); + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField HOT_MAP = new ParseField("hot_map"); + + public static final ConstructingObjectParser STRICT_PARSER = createParser(false); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>( + NAME.getPreferredName(), + lenient, + a -> new OneHotEncoding((String)a[0], (Map)a[1])); + parser.declareString(ConstructingObjectParser.constructorArg(), FIELD); + parser.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP); + return parser; + } + + public static OneHotEncoding fromXContentStrict(XContentParser parser) { + return STRICT_PARSER.apply(parser, null); + } + + public static OneHotEncoding fromXContentLenient(XContentParser parser) { + return LENIENT_PARSER.apply(parser, null); + } + + private final String field; + private final Map hotMap; + + public OneHotEncoding(String field, Map hotMap) { + this.field = ExceptionsHelper.requireNonNull(field, FIELD); + this.hotMap = Collections.unmodifiableMap(ExceptionsHelper.requireNonNull(hotMap, HOT_MAP)); + } + + public OneHotEncoding(StreamInput in) throws IOException { + this.field = in.readString(); + this.hotMap = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); + } + + /** + * @return Field name on which to one hot encode + */ + public String getField() { + return field; + } + + /** + * @return Map of Value: ColumnName for the one hot encoding + */ + public Map getHotMap() { + return hotMap; + } + + @Override + public String getName() { + return NAME.getPreferredName(); + } + + @Override + public void process(Map fields) { + String value = (String)fields.get(field); + if (value == null) { + return; + } + hotMap.forEach((val, col) -> { + int encoding = value.equals(val) ? 
1 : 0; + fields.put(col, encoding); + }); + } + + @Override + public String getWriteableName() { + return NAME.getPreferredName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeMap(hotMap, StreamOutput::writeString, StreamOutput::writeString); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FIELD.getPreferredName(), field); + builder.field(HOT_MAP.getPreferredName(), hotMap); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OneHotEncoding that = (OneHotEncoding) o; + return Objects.equals(field, that.field) + && Objects.equals(hotMap, that.hotMap); + } + + @Override + public int hashCode() { + return Objects.hash(field, hotMap); + } + +}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessor.java new file mode 100644 index 000000000000..79e1ce16ad80 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessor.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject; + +import java.util.Map; + +/** + * Describes a pre-processor for a defined machine learning model. + * This processor should take a set of fields and return the modified set of fields. + */ +public interface PreProcessor extends NamedXContentObject, NamedWriteable { + + /** + * Process the given fields and their values and return the modified map. + * + * NOTE: The passed map object is mutated directly + * @param fields The fields and their values to process + */ + void process(Map<String, Object> fields); +}
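Since process mutates the fields map in place, encoders compose by simply running them in sequence over the same map. A small sketch with illustrative mappings (unseen one-hot values encode to 0, and FrequencyEncoding falls back to 0.0 for values missing from its map):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.elasticsearch.xpack.core.ml.inference.preprocessing.FrequencyEncoding;
    import org.elasticsearch.xpack.core.ml.inference.preprocessing.OneHotEncoding;
    import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor;

    public class PreProcessorChainSketch {
        public static void main(String[] args) {
            List<PreProcessor> preProcessors = List.of(
                new OneHotEncoding("animal", Map.of("cat", "animal_cat", "dog", "animal_dog")),
                new FrequencyEncoding("color", "color_frequency", Map.of("red", 0.5, "blue", 0.25)));

            Map<String, Object> fields = new HashMap<>(Map.of("animal", "cat", "color", "blue"));
            preProcessors.forEach(p -> p.process(fields)); // each encoder mutates the map directly
            // fields now additionally holds animal_cat=1, animal_dog=0, color_frequency=0.25
        }
    }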
+ */
+public interface StrictlyParsedPreProcessor extends PreProcessor {
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncoding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncoding.java
new file mode 100644
index 000000000000..ebce49db957e
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncoding.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.preprocessing;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+
+/**
+ * PreProcessor for target mean encoding a set of categorical values for a given field.
+ */
+public class TargetMeanEncoding implements LenientlyParsedPreProcessor, StrictlyParsedPreProcessor {
+
+    public static final ParseField NAME = new ParseField("target_mean_encoding");
+    public static final ParseField FIELD = new ParseField("field");
+    public static final ParseField FEATURE_NAME = new ParseField("feature_name");
+    public static final ParseField TARGET_MEANS = new ParseField("target_means");
+    public static final ParseField DEFAULT_VALUE = new ParseField("default_value");
+
+    public static final ConstructingObjectParser<TargetMeanEncoding, Void> STRICT_PARSER = createParser(false);
+    public static final ConstructingObjectParser<TargetMeanEncoding, Void> LENIENT_PARSER = createParser(true);
+
+    @SuppressWarnings("unchecked")
+    private static ConstructingObjectParser<TargetMeanEncoding, Void> createParser(boolean lenient) {
+        ConstructingObjectParser<TargetMeanEncoding, Void> parser = new ConstructingObjectParser<>(
+            NAME.getPreferredName(),
+            lenient,
+            a -> new TargetMeanEncoding((String)a[0], (String)a[1], (Map<String, Double>)a[2], (Double)a[3]));
+        parser.declareString(ConstructingObjectParser.constructorArg(), FIELD);
+        parser.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME);
+        parser.declareObject(ConstructingObjectParser.constructorArg(),
+            (p, c) -> p.map(HashMap::new, XContentParser::doubleValue),
+            TARGET_MEANS);
+        parser.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE);
+        return parser;
+    }
+
+    public static TargetMeanEncoding fromXContentStrict(XContentParser parser) {
+        return STRICT_PARSER.apply(parser, null);
+    }
+
+    public static TargetMeanEncoding fromXContentLenient(XContentParser parser) {
+        return LENIENT_PARSER.apply(parser, null);
+    }
+
+    private final String field;
+    private final String featureName;
+    private final Map<String, Double> meanMap;
+    private final double defaultValue;
+
+    public TargetMeanEncoding(String field, String featureName, Map<String, Double> meanMap, Double defaultValue) {
+        this.field = ExceptionsHelper.requireNonNull(field, FIELD);
+        this.featureName = ExceptionsHelper.requireNonNull(featureName, FEATURE_NAME);
+        this.meanMap = Collections.unmodifiableMap(ExceptionsHelper.requireNonNull(meanMap, TARGET_MEANS));
+        this.defaultValue = ExceptionsHelper.requireNonNull(defaultValue, DEFAULT_VALUE);
+    }
+
+    public TargetMeanEncoding(StreamInput in) throws IOException {
+        this.field = in.readString();
+        this.featureName = in.readString();
+        this.meanMap = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readDouble));
+        this.defaultValue = in.readDouble();
+    }
+
+    /**
+     * @return Field name on which to target mean encode
+     */
+    public String getField() {
+        return field;
+    }
+
+    /**
+     * @return Map of value to target mean for the target mean encoding
+     */
+    public Map<String, Double> getMeanMap() {
+        return meanMap;
+    }
+
+    /**
+     * @return The default value to set when a previously unobserved value is seen
+     */
+    public Double getDefaultValue() {
+        return defaultValue;
+    }
+
+    /**
+     * @return The feature name for the encoded value
+     */
+    public String getFeatureName() {
+        return featureName;
+    }
+
+    @Override
+    public String getName() {
+        return NAME.getPreferredName();
+    }
+
+    @Override
+    public void process(Map<String, Object> fields) {
+        String value = (String)fields.get(field);
+        if (value == null) {
+            return;
+        }
+        fields.put(featureName, meanMap.getOrDefault(value, defaultValue));
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME.getPreferredName();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(field);
+        out.writeString(featureName);
+        out.writeMap(meanMap, StreamOutput::writeString, StreamOutput::writeDouble);
+        out.writeDouble(defaultValue);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(FIELD.getPreferredName(), field);
+        builder.field(FEATURE_NAME.getPreferredName(), featureName);
+        builder.field(TARGET_MEANS.getPreferredName(), meanMap);
+        builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue);
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        TargetMeanEncoding that = (TargetMeanEncoding) o;
+        return Objects.equals(field, that.field)
+            && Objects.equals(featureName, that.featureName)
+            && Objects.equals(meanMap, that.meanMap)
+            && Objects.equals(defaultValue, that.defaultValue);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(field, featureName, meanMap, defaultValue);
+    }
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LenientlyParsedTrainedModel.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LenientlyParsedTrainedModel.java
new file mode 100644
index 000000000000..208e07de17b6
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LenientlyParsedTrainedModel.java
@@ -0,0 +1,9 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+public interface LenientlyParsedTrainedModel extends TrainedModel {
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/StrictlyParsedTrainedModel.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/StrictlyParsedTrainedModel.java
new file mode 100644
index 000000000000..48b38c161942
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/StrictlyParsedTrainedModel.java
@@ -0,0 +1,9 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+public interface StrictlyParsedTrainedModel extends TrainedModel {
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java
new file mode 100644
index 000000000000..1d68e3d6d3f4
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TrainedModel.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel;
+
+import org.elasticsearch.common.io.stream.NamedWriteable;
+import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject;
+
+import java.util.List;
+import java.util.Map;
+
+public interface TrainedModel extends NamedXContentObject, NamedWriteable {
+
+    /**
+     * @return List of featureNames expected by the model. In the order that they are expected
+     */
+    List<String> getFeatureNames();
+
+    /**
+     * Infer against the provided fields
+     *
+     * @param fields The fields and their values to infer against
+     * @return The predicted value. For classification this will be discrete values (e.g. 0.0, or 1.0).
+     *         For regression this is continuous.
+     */
+    double infer(Map<String, Object> fields);
+
+    /**
+     * @return {@code true} if the model is classification, {@code false} otherwise.
+     */
+    boolean isClassification();
+
+    /**
+     * This gathers the probabilities for each potential classification value.
+     *
+     * This should only be supported when the implementation model infers classification values and not regression.
+     * @param fields The fields and their values to infer against
+     * @return The probabilities of each classification value
+     */
+    List<Double> inferProbabilities(Map<String, Object> fields);
+
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java
new file mode 100644
index 000000000000..8e48fa488a0a
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/Tree.java
@@ -0,0 +1,311 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedTrainedModel;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedTrainedModel;
+import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
+
+import java.io.IOException;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Queue;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public class Tree implements LenientlyParsedTrainedModel, StrictlyParsedTrainedModel {
+
+    public static final ParseField NAME = new ParseField("tree");
+
+    public static final ParseField FEATURE_NAMES = new ParseField("feature_names");
+    public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure");
+
+    private static final ObjectParser<Tree.Builder, Void> LENIENT_PARSER = createParser(true);
+    private static final ObjectParser<Tree.Builder, Void> STRICT_PARSER = createParser(false);
+
+    private static ObjectParser<Tree.Builder, Void> createParser(boolean lenient) {
+        ObjectParser<Tree.Builder, Void> parser = new ObjectParser<>(
+            NAME.getPreferredName(),
+            lenient,
+            Tree.Builder::new);
+        parser.declareStringArray(Tree.Builder::setFeatureNames, FEATURE_NAMES);
+        parser.declareObjectArray(Tree.Builder::setNodes, (p, c) -> TreeNode.fromXContent(p, lenient), TREE_STRUCTURE);
+        return parser;
+    }
+
+    public static Tree fromXContentStrict(XContentParser parser) {
+        return STRICT_PARSER.apply(parser, null).build();
+    }
+
+    public static Tree fromXContentLenient(XContentParser parser) {
+        return LENIENT_PARSER.apply(parser, null).build();
+    }
+
+    private final List<String> featureNames;
+    private final List<TreeNode> nodes;
+
+    Tree(List<String> featureNames, List<TreeNode> nodes) {
+        this.featureNames = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(featureNames, FEATURE_NAMES));
+        this.nodes = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(nodes, TREE_STRUCTURE));
+    }
+
+    public Tree(StreamInput in) throws IOException {
+        this.featureNames = Collections.unmodifiableList(in.readStringList());
+        this.nodes = Collections.unmodifiableList(in.readList(TreeNode::new));
+    }
+
+    @Override
+    public String getName() {
+        return NAME.getPreferredName();
+    }
+
+    @Override
+    public List<String> getFeatureNames() {
+        return featureNames;
+    }
+
+    public List<TreeNode> getNodes() {
+        return nodes;
+    }
+
+    @Override
+    public double infer(Map<String, Object> fields) {
+        List<Double> features = featureNames.stream().map(f -> (Double) fields.get(f)).collect(Collectors.toList());
+        return infer(features);
+    }
+
+    private double infer(List<Double> features) {
+        TreeNode node = nodes.get(0);
+        while(node.isLeaf() == false) {
+            node = nodes.get(node.compare(features));
+        }
+        return node.getLeafValue();
+    }
+
+    /**
+     * Trace the route predicting on the feature vector takes.
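+     *
+     * An illustrative sketch (values invented for the example; {@code addJunction}
+     * and {@code addLeaf} are the package-private helpers on the Builder below):
+     * <pre>{@code
+     * Tree.Builder builder = Tree.builder().setFeatureNames(Arrays.asList("f0"));
+     * builder.addJunction(0, 0, true, 0.5);   // root splits on f0 <= 0.5 (LTE is the default decision type)
+     * builder.addLeaf(1, 0.0);                // left leaf
+     * builder.addLeaf(2, 1.0);                // right leaf
+     * Tree tree = builder.build();
+     * tree.trace(Arrays.asList(0.25));        // visits node 0, then leaf node 1
+     * }</pre>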
+     * @param features The feature vector
+     * @return The list of traversed nodes ordered from root to leaf
+     */
+    public List<TreeNode> trace(List<Double> features) {
+        List<TreeNode> visited = new ArrayList<>();
+        TreeNode node = nodes.get(0);
+        visited.add(node);
+        while(node.isLeaf() == false) {
+            node = nodes.get(node.compare(features));
+            visited.add(node);
+        }
+        return visited;
+    }
+
+    @Override
+    public boolean isClassification() {
+        return false;
+    }
+
+    @Override
+    public List<Double> inferProbabilities(Map<String, Object> fields) {
+        throw new UnsupportedOperationException("Cannot infer probabilities against a regression model.");
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME.getPreferredName();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeStringCollection(featureNames);
+        out.writeCollection(nodes);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(FEATURE_NAMES.getPreferredName(), featureNames);
+        builder.field(TREE_STRUCTURE.getPreferredName(), nodes);
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        Tree that = (Tree) o;
+        return Objects.equals(featureNames, that.featureNames)
+            && Objects.equals(nodes, that.nodes);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(featureNames, nodes);
+    }
+
+    public static Builder builder() {
+        return new Builder();
+    }
+
+    public static class Builder {
+        private List<String> featureNames;
+        private ArrayList<TreeNode.Builder> nodes;
+        private int numNodes;
+
+        public Builder() {
+            nodes = new ArrayList<>();
+            // allocate space for the root node and set it to a leaf
+            nodes.add(null);
+            addLeaf(0, 0.0);
+            numNodes = 1;
+        }
+
+        public Builder setFeatureNames(List<String> featureNames) {
+            this.featureNames = featureNames;
+            return this;
+        }
+
+        public Builder addNode(TreeNode.Builder node) {
+            nodes.add(node);
+            return this;
+        }
+
+        public Builder setNodes(List<TreeNode.Builder> nodes) {
+            this.nodes = new ArrayList<>(nodes);
+            return this;
+        }
+
+        public Builder setNodes(TreeNode.Builder... nodes) {
+            return setNodes(Arrays.asList(nodes));
+        }
+
+        /**
+         * Add a decision node. Space for the child nodes is allocated.
+         * @param nodeIndex Where to place the node.
+         *                  This is either 0 (root) or an existing child node index
+         * @param featureIndex The feature index the decision is made on
+         * @param isDefaultLeft Default left branch if the feature is missing
+         * @param decisionThreshold The decision threshold
+         * @return The created node
+         */
+        TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) {
+            int leftChild = numNodes++;
+            int rightChild = numNodes++;
+            nodes.ensureCapacity(nodeIndex + 1);
+            for (int i = nodes.size(); i < nodeIndex + 1; i++) {
+                nodes.add(null);
+            }
+
+            TreeNode.Builder node = TreeNode.builder(nodeIndex)
+                .setDefaultLeft(isDefaultLeft)
+                .setLeftChild(leftChild)
+                .setRightChild(rightChild)
+                .setSplitFeature(featureIndex)
+                .setThreshold(decisionThreshold);
+            nodes.set(nodeIndex, node);
+
+            // allocate space for the child nodes
+            while (nodes.size() <= rightChild) {
+                nodes.add(null);
+            }
+
+            return node;
+        }
+
+        void detectCycle(List<TreeNode.Builder> nodes) {
+            if (nodes.isEmpty()) {
+                return;
+            }
+            Set<Integer> visited = new HashSet<>();
+            Queue<Integer> toVisit = new ArrayDeque<>(nodes.size());
+            toVisit.add(0);
+            while(toVisit.isEmpty() == false) {
+                Integer nodeIdx = toVisit.remove();
+                if (visited.contains(nodeIdx)) {
+                    throw new IllegalArgumentException("[tree] contains cycle at node " + nodeIdx);
+                }
+                visited.add(nodeIdx);
+                TreeNode.Builder treeNode = nodes.get(nodeIdx);
+                if (treeNode.getLeftChild() != null) {
+                    toVisit.add(treeNode.getLeftChild());
+                }
+                if (treeNode.getRightChild() != null) {
+                    toVisit.add(treeNode.getRightChild());
+                }
+            }
+        }
+
+        void detectNullOrMissingNode(List<TreeNode.Builder> nodes) {
+            if (nodes.isEmpty()) {
+                return;
+            }
+            if (nodes.get(0) == null) {
+                throw new IllegalArgumentException("[tree] must have non-null root node.");
+            }
+            List<Integer> nullOrMissingNodes = new ArrayList<>();
+            for (int i = 0; i < nodes.size(); i++) {
+                TreeNode.Builder currentNode = nodes.get(i);
+                if (currentNode == null) {
+                    continue;
+                }
+                if (nodeNullOrMissing(currentNode.getLeftChild())) {
+                    nullOrMissingNodes.add(currentNode.getLeftChild());
+                }
+                if (nodeNullOrMissing(currentNode.getRightChild())) {
+                    nullOrMissingNodes.add(currentNode.getRightChild());
+                }
+            }
+            if (nullOrMissingNodes.isEmpty() == false) {
+                throw new IllegalArgumentException("[tree] contains null or missing nodes " + nullOrMissingNodes);
+            }
+        }
+
+        private boolean nodeNullOrMissing(Integer nodeIdx) {
+            if (nodeIdx == null) {
+                return false;
+            }
+            return nodeIdx >= nodes.size() || nodes.get(nodeIdx) == null;
+        }
+
+        /**
+         * Sets the node at {@code nodeIndex} to a leaf node.
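+         * If {@code nodeIndex} lies beyond the end of the node list, the list is first
+         * padded with {@code null} placeholders, so leaves for children allocated by
+         * {@link #addJunction(int, int, boolean, double)} can be filled in any order.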
+ * @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)} + * @param value The prediction value + * @return this + */ + Tree.Builder addLeaf(int nodeIndex, double value) { + for (int i = nodes.size(); i < nodeIndex + 1; i++) { + nodes.add(null); + } + nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(value)); + return this; + } + + public Tree build() { + detectNullOrMissingNode(nodes); + detectCycle(nodes); + return new Tree(featureNames, + nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList())); + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNode.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNode.java new file mode 100644 index 000000000000..f0dbb0617503 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNode.java @@ -0,0 +1,346 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.job.config.Operator; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class TreeNode implements ToXContentObject, Writeable { + + public static final String NAME = "tree_node"; + + public static final ParseField DECISION_TYPE = new ParseField("decision_type"); + public static final ParseField THRESHOLD = new ParseField("threshold"); + public static final ParseField LEFT_CHILD = new ParseField("left_child"); + public static final ParseField RIGHT_CHILD = new ParseField("right_child"); + public static final ParseField DEFAULT_LEFT = new ParseField("default_left"); + public static final ParseField SPLIT_FEATURE = new ParseField("split_feature"); + public static final ParseField NODE_INDEX = new ParseField("node_index"); + public static final ParseField SPLIT_GAIN = new ParseField("split_gain"); + public static final ParseField LEAF_VALUE = new ParseField("leaf_value"); + + private static final ObjectParser LENIENT_PARSER = createParser(true); + private static final ObjectParser STRICT_PARSER = createParser(false); + + private static ObjectParser createParser(boolean lenient) { + ObjectParser parser = new ObjectParser<>( + NAME, + lenient, + TreeNode.Builder::new); + parser.declareDouble(TreeNode.Builder::setThreshold, THRESHOLD); + parser.declareField(TreeNode.Builder::setOperator, + p -> Operator.fromString(p.text()), + DECISION_TYPE, + ObjectParser.ValueType.STRING); + parser.declareInt(TreeNode.Builder::setLeftChild, LEFT_CHILD); + parser.declareInt(TreeNode.Builder::setRightChild, RIGHT_CHILD); + parser.declareBoolean(TreeNode.Builder::setDefaultLeft, 
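+            /* default_left selects the branch taken when the split feature is
+               missing at inference time; see TreeNode#compare. */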
DEFAULT_LEFT); + parser.declareInt(TreeNode.Builder::setSplitFeature, SPLIT_FEATURE); + parser.declareInt(TreeNode.Builder::setNodeIndex, NODE_INDEX); + parser.declareDouble(TreeNode.Builder::setSplitGain, SPLIT_GAIN); + parser.declareDouble(TreeNode.Builder::setLeafValue, LEAF_VALUE); + return parser; + } + + public static TreeNode.Builder fromXContent(XContentParser parser, boolean lenient) { + return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + private final Operator operator; + private final Double threshold; + private final Integer splitFeature; + private final int nodeIndex; + private final Double splitGain; + private final Double leafValue; + private final boolean defaultLeft; + private final int leftChild; + private final int rightChild; + + + TreeNode(Operator operator, + Double threshold, + Integer splitFeature, + Integer nodeIndex, + Double splitGain, + Double leafValue, + Boolean defaultLeft, + Integer leftChild, + Integer rightChild) { + this.operator = operator == null ? Operator.LTE : operator; + this.threshold = threshold; + this.splitFeature = splitFeature; + this.nodeIndex = ExceptionsHelper.requireNonNull(nodeIndex, NODE_INDEX.getPreferredName()); + this.splitGain = splitGain; + this.leafValue = leafValue; + this.defaultLeft = defaultLeft == null ? false : defaultLeft; + this.leftChild = leftChild == null ? -1 : leftChild; + this.rightChild = rightChild == null ? -1 : rightChild; + } + + public TreeNode(StreamInput in) throws IOException { + operator = Operator.readFromStream(in); + threshold = in.readOptionalDouble(); + splitFeature = in.readOptionalInt(); + splitGain = in.readOptionalDouble(); + nodeIndex = in.readInt(); + leafValue = in.readOptionalDouble(); + defaultLeft = in.readBoolean(); + leftChild = in.readInt(); + rightChild = in.readInt(); + } + + + public Operator getOperator() { + return operator; + } + + public Double getThreshold() { + return threshold; + } + + public Integer getSplitFeature() { + return splitFeature; + } + + public Integer getNodeIndex() { + return nodeIndex; + } + + public Double getSplitGain() { + return splitGain; + } + + public Double getLeafValue() { + return leafValue; + } + + public boolean isDefaultLeft() { + return defaultLeft; + } + + public int getLeftChild() { + return leftChild; + } + + public int getRightChild() { + return rightChild; + } + + public boolean isLeaf() { + return leftChild < 1; + } + + public int compare(List features) { + if (isLeaf()) { + throw new IllegalArgumentException("cannot call compare against a leaf node."); + } + Double feature = features.get(splitFeature); + if (isMissing(feature)) { + return defaultLeft ? leftChild : rightChild; + } + return operator.test(feature, threshold) ? 
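+        /* with the default LTE decision type this reads:
+           feature <= threshold ? go left : go right */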
leftChild : rightChild; + } + + private boolean isMissing(Double feature) { + return feature == null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + operator.writeTo(out); + out.writeOptionalDouble(threshold); + out.writeOptionalInt(splitFeature); + out.writeOptionalDouble(splitGain); + out.writeInt(nodeIndex); + out.writeOptionalDouble(leafValue); + out.writeBoolean(defaultLeft); + out.writeInt(leftChild); + out.writeInt(rightChild); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + addOptionalField(builder, DECISION_TYPE, operator); + addOptionalField(builder, THRESHOLD, threshold); + addOptionalField(builder, SPLIT_FEATURE, splitFeature); + addOptionalField(builder, SPLIT_GAIN, splitGain); + builder.field(NODE_INDEX.getPreferredName(), nodeIndex); + addOptionalField(builder, LEAF_VALUE, leafValue); + builder.field(DEFAULT_LEFT.getPreferredName(), defaultLeft); + if (leftChild >= 0) { + builder.field(LEFT_CHILD.getPreferredName(), leftChild); + } + if (rightChild >= 0) { + builder.field(RIGHT_CHILD.getPreferredName(), rightChild); + } + builder.endObject(); + return builder; + } + + private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { + if (value != null) { + builder.field(field.getPreferredName(), value); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TreeNode that = (TreeNode) o; + return Objects.equals(operator, that.operator) + && Objects.equals(threshold, that.threshold) + && Objects.equals(splitFeature, that.splitFeature) + && Objects.equals(nodeIndex, that.nodeIndex) + && Objects.equals(splitGain, that.splitGain) + && Objects.equals(leafValue, that.leafValue) + && Objects.equals(defaultLeft, that.defaultLeft) + && Objects.equals(leftChild, that.leftChild) + && Objects.equals(rightChild, that.rightChild); + } + + @Override + public int hashCode() { + return Objects.hash(operator, + threshold, + splitFeature, + splitGain, + nodeIndex, + leafValue, + defaultLeft, + leftChild, + rightChild); + } + + @Override + public String toString() { + return Strings.toString(this); + } + + public static Builder builder(int nodeIndex) { + return new Builder(nodeIndex); + } + + public static class Builder { + private Operator operator; + private Double threshold; + private Integer splitFeature; + private int nodeIndex; + private Double splitGain; + private Double leafValue; + private Boolean defaultLeft; + private Integer leftChild; + private Integer rightChild; + + public Builder(int nodeIndex) { + this.nodeIndex = nodeIndex; + } + + private Builder() { + } + + public Builder setOperator(Operator operator) { + this.operator = operator; + return this; + } + + public Builder setThreshold(Double threshold) { + this.threshold = threshold; + return this; + } + + public Builder setSplitFeature(Integer splitFeature) { + this.splitFeature = splitFeature; + return this; + } + + public Builder setNodeIndex(Integer nodeIndex) { + this.nodeIndex = nodeIndex; + return this; + } + + public Builder setSplitGain(Double splitGain) { + this.splitGain = splitGain; + return this; + } + + public Builder setLeafValue(Double leafValue) { + this.leafValue = leafValue; + return this; + } + + public Builder setDefaultLeft(Boolean defaultLeft) { + this.defaultLeft = defaultLeft; + return this; + } + + public Builder setLeftChild(Integer 
leftChild) { + this.leftChild = leftChild; + return this; + } + + Integer getLeftChild() { + return leftChild; + } + + public Builder setRightChild(Integer rightChild) { + this.rightChild = rightChild; + return this; + } + + Integer getRightChild() { + return rightChild; + } + + public void validate() { + if (nodeIndex < 0) { + throw new IllegalArgumentException("[node_index] must be a non-negative integer."); + } + if (leftChild == null) { // leaf validations + if (leafValue == null) { + throw new IllegalArgumentException("[leaf_value] is required for a leaf node."); + } + } else { + if (leftChild < 0) { + throw new IllegalArgumentException("[left_child] must be a non-negative integer."); + } + if (rightChild != null && rightChild < 0) { + throw new IllegalArgumentException("[right_child] must be a non-negative integer."); + } + if (threshold == null) { + throw new IllegalArgumentException("[threshold] must exist for non-leaf node."); + } + } + } + + public TreeNode build() { + validate(); + return new TreeNode(operator, + threshold, + splitFeature, + nodeIndex, + splitGain, + leafValue, + defaultLeft, + leftChild, + rightChild); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObject.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObject.java new file mode 100644 index 000000000000..6f8f38787c63 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObject.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.utils; + +import org.elasticsearch.common.xcontent.ToXContentObject; + +/** + * Simple interface for XContent Objects that are named. + * + * This affords more general handling when serializing and de-serializing this type of XContent when it is used in a NamedObjects + * parser. 
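+ *
+ * For example, implementations such as {@code PreProcessor} are expected to be
+ * serialized wrapped in an object keyed by {@link #getName()}, along the lines of
+ * {@code {"one_hot_encoding": {"field": "animal", "hot_map": {"cat": "animal_cat"}}}}
+ * (the field values here are invented for illustration).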
+ */ +public interface NamedXContentObject extends ToXContentObject { + /** + * @return The name of the XContentObject that is to be serialized + */ + String getName(); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java index 40782a10befa..2f9eba0c88b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequest.java @@ -15,8 +15,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import java.io.IOException; @@ -25,6 +27,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -67,6 +70,24 @@ public class PutRoleRequest extends ActionRequest implements WriteRequest clauses = new ArrayList<>(); - clauses.addAll(boolQueryBuilder.filter()); - clauses.addAll(boolQueryBuilder.must()); - clauses.addAll(boolQueryBuilder.mustNot()); - clauses.addAll(boolQueryBuilder.should()); - for (QueryBuilder clause : clauses) { - verifyRoleQuery(clause); - } - } else if (queryBuilder instanceof ConstantScoreQueryBuilder) { - verifyRoleQuery(((ConstantScoreQueryBuilder) queryBuilder).innerQuery()); - } else if (queryBuilder instanceof FunctionScoreQueryBuilder) { - verifyRoleQuery(((FunctionScoreQueryBuilder) queryBuilder).query()); - } else if (queryBuilder instanceof BoostingQueryBuilder) { - verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery()); - verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery()); - } - } - /** * Fall back validation that verifies that queries during rewrite don't use * the client to make remote calls. 
In the case of DLS this can cause a dead diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 53a1dcda2a7e..23769a5f04de 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.core.security.authz.privilege; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; @@ -37,6 +39,8 @@ import java.util.stream.Stream; * Translates cluster privilege names into concrete implementations */ public class ClusterPrivilegeResolver { + private static final Logger logger = LogManager.getLogger(ClusterPrivilegeResolver.class); + // shared automatons private static final Set ALL_SECURITY_PATTERN = Set.of("cluster:admin/xpack/security/*"); private static final Set MANAGE_SAML_PATTERN = Set.of("cluster:admin/xpack/security/saml/*", @@ -159,10 +163,12 @@ public class ClusterPrivilegeResolver { if (fixedPrivilege != null) { return fixedPrivilege; } - throw new IllegalArgumentException("unknown cluster privilege [" + name + "]. a privilege must be either " + + String errorMessage = "unknown cluster privilege [" + name + "]. a privilege must be either " + "one of the predefined cluster privilege names [" + Strings.collectionToCommaDelimitedString(VALUES.keySet()) + "] or a pattern over one of the available " + - "cluster actions"); + "cluster actions"; + logger.debug(errorMessage); + throw new IllegalArgumentException(errorMessage); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 32f90993d019..08b67396c600 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; @@ -38,6 +40,7 @@ import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; import static org.elasticsearch.xpack.core.security.support.Automatons.unionAndMinimize; public final class IndexPrivilege extends Privilege { + private static final Logger logger = LogManager.getLogger(IndexPrivilege.class); private static final Automaton ALL_AUTOMATON = patterns("indices:*", "internal:transport/proxy/indices:*"); private static final Automaton READ_AUTOMATON = patterns("indices:data/read/*"); @@ -139,10 +142,12 @@ public final class IndexPrivilege extends Privilege { } else if (indexPrivilege != null) 
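                /* a predefined privilege name such as "read": reuse its precompiled automaton */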
{
                automata.add(indexPrivilege.automaton);
            } else {
-                throw new IllegalArgumentException("unknown index privilege [" + part + "]. a privilege must be either " +
-                    "one of the predefined fixed indices privileges [" +
-                    Strings.collectionToCommaDelimitedString(VALUES.entrySet()) + "] or a pattern over one of the available index" +
-                    " actions");
+                String errorMessage = "unknown index privilege [" + part + "]. a privilege must be either " +
+                    "one of the predefined fixed indices privileges [" +
+                    Strings.collectionToCommaDelimitedString(VALUES.entrySet()) + "] or a pattern over one of the available index" +
+                    " actions";
+                logger.debug(errorMessage);
+                throw new IllegalArgumentException(errorMessage);
            }
        }
    }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidator.java
new file mode 100644
index 000000000000..1e4df7e8a4a9
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidator.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.authz.support;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParseException;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.query.AbstractQueryBuilder;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.BoostingQueryBuilder;
+import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
+import org.elasticsearch.index.query.GeoShapeQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.TermsQueryBuilder;
+import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.user.User;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This class helps in evaluating the query field if it is a template,
+ * validating the query, and checking whether the query type is allowed to be used in a DLS role query.
+ */
+public final class DLSRoleQueryValidator {
+
+    private DLSRoleQueryValidator() {
+    }
+
+    /**
+     * Validates the query field in the {@link RoleDescriptor.IndicesPrivileges} only if it is not a template query.
+     * It parses the query and builds the {@link QueryBuilder}, and also checks whether the query type is supported
+     * in a DLS role query.
+     *
+     * @param indicesPrivileges {@link RoleDescriptor.IndicesPrivileges}
+     * @param xContentRegistry {@link NamedXContentRegistry} for finding named queries
+     */
+    public static void validateQueryField(RoleDescriptor.IndicesPrivileges[] indicesPrivileges,
+                                          NamedXContentRegistry xContentRegistry) {
+        if (indicesPrivileges != null) {
+            for (int i = 0; i < indicesPrivileges.length; i++) {
+                BytesReference query = indicesPrivileges[i].getQuery();
+                try {
+                    if (query != null) {
+                        if (isTemplateQuery(query, xContentRegistry)) {
+                            // skip template query, this requires runtime information like 'User' information.
+                            continue;
+                        }
+
+                        evaluateAndVerifyRoleQuery(query.utf8ToString(), xContentRegistry);
+                    }
+                } catch (ParsingException | IllegalArgumentException | IOException e) {
+                    throw new ElasticsearchParseException("failed to parse field 'query' for indices [" +
+                        Strings.arrayToCommaDelimitedString(indicesPrivileges[i].getIndices()) +
+                        "] at index privilege [" + i + "] of role descriptor", e);
+                }
+            }
+        }
+    }
+
+    private static boolean isTemplateQuery(BytesReference query, NamedXContentRegistry xContentRegistry) throws IOException {
+        try (XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry,
+            LoggingDeprecationHandler.INSTANCE, query.utf8ToString())) {
+            XContentParser.Token token = parser.nextToken();
+            if (token != XContentParser.Token.START_OBJECT) {
+                throw new XContentParseException(parser.getTokenLocation(), "expected [" + XContentParser.Token.START_OBJECT + "] but " +
+                    "found [" + token + "] instead");
+            }
+            token = parser.nextToken();
+            if (token != XContentParser.Token.FIELD_NAME) {
+                throw new XContentParseException(parser.getTokenLocation(), "expected [" + XContentParser.Token.FIELD_NAME + "] with " +
+                    "value a query name or 'template' but found [" + token + "] instead");
+            }
+            String fieldName = parser.currentName();
+            if ("template".equals(fieldName)) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    /**
+     * Evaluates the query if it is a template and then validates the query by parsing
+     * and building the {@link QueryBuilder}. It also checks whether the query type is
+     * supported in a DLS role query.
+     *
+     * @param query {@link BytesReference} query field from the role
+     * @param scriptService {@link ScriptService} used for evaluation of a template query
+     * @param xContentRegistry {@link NamedXContentRegistry} for finding named queries
+     * @param user {@link User} used when evaluating a template query
+     * @return {@link QueryBuilder} if the query is valid and allowed; in case {@link RoleDescriptor.IndicesPrivileges}
+     * does not have a query field then it returns {@code null}.
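+     *
+     * An assumed call-site sketch (the surrounding variables are hypothetical):
+     * <pre>{@code
+     * QueryBuilder roleQuery = DLSRoleQueryValidator.evaluateAndVerifyRoleQuery(
+     *     indicesPrivileges.getQuery(), scriptService, xContentRegistry, user);
+     * }</pre>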
+ */ + @Nullable + public static QueryBuilder evaluateAndVerifyRoleQuery(BytesReference query, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, User user) { + if (query != null) { + String templateResult = SecurityQueryTemplateEvaluator.evaluateTemplate(query.utf8ToString(), scriptService, + user); + try { + return evaluateAndVerifyRoleQuery(templateResult, xContentRegistry); + } catch (ElasticsearchParseException | ParsingException | XContentParseException | IOException e) { + throw new ElasticsearchParseException("failed to parse field 'query' from the role descriptor", e); + } + } + return null; + } + + @Nullable + private static QueryBuilder evaluateAndVerifyRoleQuery(String query, NamedXContentRegistry xContentRegistry) throws IOException { + if (query != null) { + try (XContentParser parser = XContentFactory.xContent(query).createParser(xContentRegistry, + LoggingDeprecationHandler.INSTANCE, query)) { + QueryBuilder queryBuilder = AbstractQueryBuilder.parseInnerQueryBuilder(parser); + verifyRoleQuery(queryBuilder); + return queryBuilder; + } + } + return null; + } + + /** + * Checks whether the role query contains queries we know can't be used as DLS role query. + * + * @param queryBuilder {@link QueryBuilder} for given query + */ + // pkg protected for testing + static void verifyRoleQuery(QueryBuilder queryBuilder) { + if (queryBuilder instanceof TermsQueryBuilder) { + TermsQueryBuilder termsQueryBuilder = (TermsQueryBuilder) queryBuilder; + if (termsQueryBuilder.termsLookup() != null) { + throw new IllegalArgumentException("terms query with terms lookup isn't supported as part of a role query"); + } + } else if (queryBuilder instanceof GeoShapeQueryBuilder) { + GeoShapeQueryBuilder geoShapeQueryBuilder = (GeoShapeQueryBuilder) queryBuilder; + if (geoShapeQueryBuilder.shape() == null) { + throw new IllegalArgumentException("geoshape query referring to indexed shapes isn't supported as part of a role query"); + } + } else if (queryBuilder.getName().equals("percolate")) { + // actually only if percolate query is referring to an existing document then this is problematic, + // a normal percolate query does work. However we can't check that here as this query builder is inside + // another module. So we don't allow the entire percolate query. I don't think users would ever use + // a percolate query as role query, so this restriction shouldn't prohibit anyone from using dls. 
+ throw new IllegalArgumentException("percolate query isn't supported as part of a role query"); + } else if (queryBuilder.getName().equals("has_child")) { + throw new IllegalArgumentException("has_child query isn't supported as part of a role query"); + } else if (queryBuilder.getName().equals("has_parent")) { + throw new IllegalArgumentException("has_parent query isn't supported as part of a role query"); + } else if (queryBuilder instanceof BoolQueryBuilder) { + BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder; + List clauses = new ArrayList<>(); + clauses.addAll(boolQueryBuilder.filter()); + clauses.addAll(boolQueryBuilder.must()); + clauses.addAll(boolQueryBuilder.mustNot()); + clauses.addAll(boolQueryBuilder.should()); + for (QueryBuilder clause : clauses) { + verifyRoleQuery(clause); + } + } else if (queryBuilder instanceof ConstantScoreQueryBuilder) { + verifyRoleQuery(((ConstantScoreQueryBuilder) queryBuilder).innerQuery()); + } else if (queryBuilder instanceof FunctionScoreQueryBuilder) { + verifyRoleQuery(((FunctionScoreQueryBuilder) queryBuilder).query()); + } else if (queryBuilder instanceof BoostingQueryBuilder) { + verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery()); + verifyRoleQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluator.java index 73a1d7fcde50..0fac10163423 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluator.java @@ -44,10 +44,8 @@ public final class SecurityQueryTemplateEvaluator { * @return resultant query string after compiling and executing the script. * If the source does not contain template then it will return the query * source without any modifications. - * @throws IOException thrown when there is any error parsing the query - * string. 
*/ - public static String evaluateTemplate(final String querySource, final ScriptService scriptService, final User user) throws IOException { + public static String evaluateTemplate(final String querySource, final ScriptService scriptService, final User user) { // EMPTY is safe here because we never use namedObject try (XContentParser parser = XContentFactory.xContent(querySource).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, querySource)) { @@ -76,6 +74,8 @@ public final class SecurityQueryTemplateEvaluator { } else { return querySource; } + } catch (IOException ioe) { + throw new ElasticsearchParseException("failed to parse query", ioe); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicy.java index e038d3bb6e3b..0ef7912c5801 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotLifecyclePolicy.java @@ -260,7 +260,7 @@ public class SnapshotLifecyclePolicy extends AbstractDiffable mergedConfiguration = new HashMap<>(configuration); mergedConfiguration.put("metadata", metadataWithAddedPolicyName); req.source(mergedConfiguration); - req.waitForCompletion(false); + req.waitForCompletion(true); return req; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformAction.java similarity index 93% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformAction.java index 033076658295..fcc17a5ceddf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformAction.java @@ -28,16 +28,16 @@ import java.util.List; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetTransformsAction extends ActionType { +public class GetTransformAction extends ActionType { - public static final GetTransformsAction INSTANCE = new GetTransformsAction(); + public static final GetTransformAction INSTANCE = new GetTransformAction(); public static final String NAME = "cluster:monitor/data_frame/get"; private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - LogManager.getLogger(GetTransformsAction.class)); + LogManager.getLogger(GetTransformAction.class)); - private GetTransformsAction() { - super(NAME, GetTransformsAction.Response::new); + private GetTransformAction() { + super(NAME, GetTransformAction.Response::new); } public static class Request extends AbstractGetResourcesRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java similarity index 96% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java index 
2cc8db182fe3..b8b1f23ba005 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java @@ -33,12 +33,12 @@ import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetTransformsStatsAction extends ActionType { +public class GetTransformStatsAction extends ActionType { - public static final GetTransformsStatsAction INSTANCE = new GetTransformsStatsAction(); + public static final GetTransformStatsAction INSTANCE = new GetTransformStatsAction(); public static final String NAME = "cluster:monitor/data_frame/stats/get"; - public GetTransformsStatsAction() { - super(NAME, GetTransformsStatsAction.Response::new); + public GetTransformStatsAction() { + super(NAME, GetTransformStatsAction.Response::new); } public static class Request extends BaseTasksRequest { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java new file mode 100644 index 000000000000..dcf8707e87c3 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/persistence/TransformInternalIndexConstants.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.transform.transforms.persistence; + +public final class TransformInternalIndexConstants { + + /* Constants for internal indexes of the transform plugin + * (defined in core to provide wider access) + * + * Increase the version number for every mapping change, see TransformInternalIndex for details + * + * Together with increasing the version number please keep the following in sync: + * + * - XPackRestTestConstants + * - yaml tests under x-pack/qa/ + * + * (pro-tip: grep for the constant) + */ + + // internal index + public static final String INDEX_VERSION = "2"; + public static final String INDEX_PATTERN = ".data-frame-internal-"; + public static final String LATEST_INDEX_VERSIONED_NAME = INDEX_PATTERN + INDEX_VERSION; + public static final String LATEST_INDEX_NAME = LATEST_INDEX_VERSIONED_NAME; + public static final String INDEX_NAME_PATTERN = INDEX_PATTERN + "*"; + + // audit index + public static final String AUDIT_TEMPLATE_VERSION = "1"; + public static final String AUDIT_INDEX_PREFIX = ".data-frame-notifications-"; + public static final String AUDIT_INDEX = AUDIT_INDEX_PREFIX + AUDIT_TEMPLATE_VERSION; + + private TransformInternalIndexConstants() { + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleStats.java index fa018abc6c43..7b401cb4025d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleStats.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; +import java.util.Comparator; 
import java.util.HashMap; import java.util.List; import java.util.Map; @@ -71,7 +72,7 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIMED_OUT); PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIME_MILLIS); - PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> SnapshotPolicyStats.parse(p, n), POLICY_STATS); + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshotPolicyStats.PARSER, POLICY_STATS); } public SnapshotLifecycleStats() { @@ -213,23 +214,25 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { builder.field(RETENTION_TIME.getPreferredName(), retentionTime); builder.field(RETENTION_TIME_MILLIS.getPreferredName(), retentionTime.millis()); - Map metrics = getMetrics(); - long totalTaken = metrics.values().stream().mapToLong(s -> s.snapshotsTaken.count()).sum(); - long totalFailed = metrics.values().stream().mapToLong(s -> s.snapshotsFailed.count()).sum(); - long totalDeleted = metrics.values().stream().mapToLong(s -> s.snapshotsDeleted.count()).sum(); - long totalDeleteFailures = metrics.values().stream().mapToLong(s -> s.snapshotDeleteFailures.count()).sum(); + List metrics = getMetrics().values().stream() + .sorted(Comparator.comparing(SnapshotPolicyStats::getPolicyId)) // maintain a consistent order when serializing + .collect(Collectors.toList()); + long totalTaken = metrics.stream().mapToLong(s -> s.snapshotsTaken.count()).sum(); + long totalFailed = metrics.stream().mapToLong(s -> s.snapshotsFailed.count()).sum(); + long totalDeleted = metrics.stream().mapToLong(s -> s.snapshotsDeleted.count()).sum(); + long totalDeleteFailures = metrics.stream().mapToLong(s -> s.snapshotDeleteFailures.count()).sum(); builder.field(TOTAL_TAKEN.getPreferredName(), totalTaken); builder.field(TOTAL_FAILED.getPreferredName(), totalFailed); builder.field(TOTAL_DELETIONS.getPreferredName(), totalDeleted); builder.field(TOTAL_DELETION_FAILURES.getPreferredName(), totalDeleteFailures); - builder.startObject(POLICY_STATS.getPreferredName()); - for (Map.Entry policy : metrics.entrySet()) { - SnapshotPolicyStats perPolicyMetrics = policy.getValue(); - builder.startObject(perPolicyMetrics.policyId); - perPolicyMetrics.toXContent(builder, params); + + builder.startArray(POLICY_STATS.getPreferredName()); + for (SnapshotPolicyStats stats : metrics) { + builder.startObject(); + stats.toXContent(builder, params); builder.endObject(); } - builder.endObject(); + builder.endArray(); builder.endObject(); return builder; } @@ -268,22 +271,25 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { private final CounterMetric snapshotsDeleted = new CounterMetric(); private final CounterMetric snapshotDeleteFailures = new CounterMetric(); + public static final ParseField POLICY_ID = new ParseField("policy"); public static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken"); public static final ParseField SNAPSHOTS_FAILED = new ParseField("snapshots_failed"); public static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted"); public static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures"); - private static final ConstructingObjectParser PARSER = + static final ConstructingObjectParser PARSER = new 
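+        /* With this change "policy_stats" is rendered as an array whose entries carry
+           their own "policy" id, e.g. (illustrative values):
+               "policy_stats": [ { "policy": "nightly", "snapshots_taken": 5, ... } ]
+           rather than as an object keyed by policy id. */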
ConstructingObjectParser<>("snapshot_policy_stats", true, - (a, id) -> { - long taken = (long) a[0]; - long failed = (long) a[1]; - long deleted = (long) a[2]; - long deleteFailed = (long) a[3]; + a -> { + String id = (String) a[0]; + long taken = (long) a[1]; + long failed = (long) a[2]; + long deleted = (long) a[3]; + long deleteFailed = (long) a[4]; return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed); }); static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_TAKEN); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_FAILED); PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_DELETED); @@ -310,8 +316,8 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { this.snapshotDeleteFailures.inc(in.readVLong()); } - public static SnapshotPolicyStats parse(XContentParser parser, String policyId) { - return PARSER.apply(parser, policyId); + public static SnapshotPolicyStats parse(XContentParser parser) { + return PARSER.apply(parser, null); } public SnapshotPolicyStats merge(SnapshotPolicyStats other) { @@ -339,6 +345,10 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { snapshotDeleteFailures.inc(); } + public String getPolicyId() { + return policyId; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(policyId); @@ -372,6 +382,7 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(SnapshotPolicyStats.POLICY_ID.getPreferredName(), policyId); builder.field(SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName(), snapshotsTaken.count()); builder.field(SnapshotPolicyStats.SNAPSHOTS_FAILED.getPreferredName(), snapshotsFailed.count()); builder.field(SnapshotPolicyStats.SNAPSHOTS_DELETED.getPreferredName(), snapshotsDeleted.count()); diff --git a/x-pack/plugin/core/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/src/main/resources/monitoring-beats.json index d9824f8c21c2..a39507ec8a4d 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-beats.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-beats.json @@ -246,6 +246,96 @@ }, "apm-server": { "properties": { + "acm": { + "properties": { + "request": { + "properties": { + "count": { + "type": "long" + } + } + }, + "response": { + "properties": { + "count": { + "type": "long" + }, + "errors": { + "properties": { + "validate": { + "type": "long" + }, + "internal": { + "type": "long" + }, + "queue": { + "type": "long" + }, + "count": { + "type": "long" + }, + "decode": { + "type": "long" + }, + "toolarge": { + "type": "long" + }, + "unavailable": { + "type": "long" + }, + "forbidden": { + "type": "long" + }, + "method": { + "type": "long" + }, + "notfound": { + "type": "long" + }, + "invalidquery": { + "type": "long" + }, + "ratelimit": { + "type": "long" + }, + "closed": { + "type": "long" + }, + "unauthorized": { + "type": "long" + } + } + }, + "valid": { + "properties": { + "notmodified": { + "type": "long" + }, + "count": { + "type": "long" + }, + "ok": { + "type": "long" + }, + "accepted": { + "type": "long" + } + } + }, + "unset": { + "type": "long" + }, + "request": { + "properties": { + "count": { + "type": "long" + } + } + } + } + } + } + }, "server": { "properties": { "request": { diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractLicensesIntegrationTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractLicensesIntegrationTestCase.java index 58caf0c512b0..77e2df67af1d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractLicensesIntegrationTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractLicensesIntegrationTestCase.java @@ -85,16 +85,15 @@ public abstract class AbstractLicensesIntegrationTestCase extends ESIntegTestCas latch.await(); } - protected void assertLicenseActive(boolean active) throws InterruptedException { - boolean success = awaitBusy(() -> { + protected void assertLicenseActive(boolean active) throws Exception { + assertBusy(() -> { for (XPackLicenseState licenseState : internalCluster().getDataNodeInstances(XPackLicenseState.class)) { if (licenseState.isActive() == active) { - return true; + return; } } - return false; + fail("No data nodes have a license active state of [" + active + "]"); }); - assertTrue(success); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java index 10a441526400..467a859b6c2d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java @@ -169,16 +169,15 @@ public class LicenseServiceClusterTests extends AbstractLicensesIntegrationTestC assertLicenseActive(true); } - private void assertOperationMode(License.OperationMode operationMode) throws InterruptedException { - boolean success = awaitBusy(() -> { + private void assertOperationMode(License.OperationMode operationMode) throws Exception { + assertBusy(() -> { for (XPackLicenseState licenseState : internalCluster().getDataNodeInstances(XPackLicenseState.class)) { if (licenseState.getOperationMode() == operationMode) { - return true; + return; } } - return false; + fail("No data nodes found with operation mode [" + operationMode + "]"); }); - assertTrue(success); } private void writeCloudInternalMode(String mode) throws Exception { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java index 65b85df36af0..a80f6010b026 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/SourceOnlySnapshotShardTests.java @@ -95,8 +95,8 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { SourceOnlySnapshotRepository repository = new SourceOnlySnapshotRepository(createRepository()); repository.start(); try (Engine.IndexCommitRef snapshotRef = shard.acquireLastIndexCommit(true)) { - IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing(); - final PlainActionFuture future = PlainActionFuture.newFuture(); + IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing("-1"); + final PlainActionFuture future = PlainActionFuture.newFuture(); runAsSnapshot(shard.getThreadPool(), () -> repository.snapshotShard(shard.store(), shard.mapperService(), snapshotId, indexId, snapshotRef.getIndexCommit(), indexShardSnapshotStatus, future)); IllegalStateException 
illegalStateException = expectThrows(IllegalStateException.class, future::actionGet); @@ -117,14 +117,15 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { IndexId indexId = new IndexId(shard.shardId().getIndexName(), shard.shardId().getIndex().getUUID()); SourceOnlySnapshotRepository repository = new SourceOnlySnapshotRepository(createRepository()); repository.start(); - int totalFileCount = -1; + int totalFileCount; + String shardGeneration; try (Engine.IndexCommitRef snapshotRef = shard.acquireLastIndexCommit(true)) { - IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing(); + IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing(null); SnapshotId snapshotId = new SnapshotId("test", "test"); - final PlainActionFuture future = PlainActionFuture.newFuture(); + final PlainActionFuture future = PlainActionFuture.newFuture(); runAsSnapshot(shard.getThreadPool(), () -> repository.snapshotShard(shard.store(), shard.mapperService(), snapshotId, indexId, snapshotRef.getIndexCommit(), indexShardSnapshotStatus, future)); - future.actionGet(); + shardGeneration = future.actionGet(); IndexShardSnapshotStatus.Copy copy = indexShardSnapshotStatus.asCopy(); assertEquals(copy.getTotalFileCount(), copy.getIncrementalFileCount()); totalFileCount = copy.getTotalFileCount(); @@ -136,11 +137,11 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { try (Engine.IndexCommitRef snapshotRef = shard.acquireLastIndexCommit(true)) { SnapshotId snapshotId = new SnapshotId("test_1", "test_1"); - IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing(); - final PlainActionFuture future = PlainActionFuture.newFuture(); + IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing(shardGeneration); + final PlainActionFuture future = PlainActionFuture.newFuture(); runAsSnapshot(shard.getThreadPool(), () -> repository.snapshotShard(shard.store(), shard.mapperService(), snapshotId, indexId, snapshotRef.getIndexCommit(), indexShardSnapshotStatus, future)); - future.actionGet(); + shardGeneration = future.actionGet(); IndexShardSnapshotStatus.Copy copy = indexShardSnapshotStatus.asCopy(); // we processed the segments_N file plus _1.si, _1.fdx, _1.fnm, _1.fdt assertEquals(5, copy.getIncrementalFileCount()); @@ -152,8 +153,8 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { try (Engine.IndexCommitRef snapshotRef = shard.acquireLastIndexCommit(true)) { SnapshotId snapshotId = new SnapshotId("test_2", "test_2"); - IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing(); - final PlainActionFuture future = PlainActionFuture.newFuture(); + IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing(shardGeneration); + final PlainActionFuture future = PlainActionFuture.newFuture(); runAsSnapshot(shard.getThreadPool(), () -> repository.snapshotShard(shard.store(), shard.mapperService(), snapshotId, indexId, snapshotRef.getIndexCommit(), indexShardSnapshotStatus, future)); future.actionGet(); @@ -199,8 +200,8 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { SourceOnlySnapshotRepository repository = new SourceOnlySnapshotRepository(createRepository()); repository.start(); try (Engine.IndexCommitRef snapshotRef = shard.acquireLastIndexCommit(true)) { - IndexShardSnapshotStatus indexShardSnapshotStatus = 
IndexShardSnapshotStatus.newInitializing(); - final PlainActionFuture future = PlainActionFuture.newFuture(); + IndexShardSnapshotStatus indexShardSnapshotStatus = IndexShardSnapshotStatus.newInitializing(null); + final PlainActionFuture future = PlainActionFuture.newFuture(); runAsSnapshot(shard.getThreadPool(), () -> { repository.snapshotShard(shard.store(), shard.mapperService(), snapshotId, indexId, snapshotRef.getIndexCommit(), indexShardSnapshotStatus, future); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParserTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParserTests.java new file mode 100644 index 000000000000..520a316aaee4 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleOriginationDateParserTests.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ilm; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.test.ESTestCase; + +import java.text.ParseException; + +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.parseIndexNameAndExtractDate; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.shouldParseIndexName; +import static org.hamcrest.Matchers.is; + +public class IndexLifecycleOriginationDateParserTests extends ESTestCase { + + private static final DateFormatter dateFormatter = DateFormatter.forPattern("yyyy.MM.dd"); + + public void testShouldParseIndexNameReturnsFalseWhenOriginationDateIsSet() { + Settings settings = Settings.builder() + .put(LifecycleSettings.LIFECYCLE_ORIGINATION_DATE, 1L) + .build(); + assertThat(shouldParseIndexName(settings), is(false)); + } + + public void testShouldParseIndexNameReturnsFalseIfParseOriginationDateIsDisabled() { + Settings settings = Settings.builder() + .put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, false) + .build(); + assertThat(shouldParseIndexName(settings), is(false)); + } + + public void testShouldParseIndexNameReturnsTrueIfParseOriginationDateIsTrueAndOriginationDateIsNotSet() { + Settings settings = Settings.builder() + .put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true) + .build(); + assertThat(shouldParseIndexName(settings), is(true)); + } + + public void testParseIndexNameThatMatchesExpectedFormat() throws ParseException { + long expectedDate = dateFormatter.parseMillis("2019.09.04"); + { + long parsedDate = parseIndexNameAndExtractDate("indexName-2019.09.04"); + assertThat("indexName-yyyy.MM.dd is a valid index format", parsedDate, is(expectedDate)); + } + + { + long parsedDate = parseIndexNameAndExtractDate("indexName-2019.09.04-0000001"); + assertThat("indexName-yyyy.MM.dd-\\d+$ is a valid index format", parsedDate, is(expectedDate)); + } + + { + long parsedDate = parseIndexNameAndExtractDate("indexName-2019.09.04-2019.09.24"); + long secondDateInIndexName = dateFormatter.parseMillis("2019.09.24"); + assertThat("indexName-yyyy.MM.dd-yyyy.MM.dd is a valid index format and the second date should be parsed", + parsedDate, is(secondDateInIndexName)); + } + + { + long parsedDate = 
parseIndexNameAndExtractDate("index-2019.09.04-2019.09.24-00002"); + long secondDateInIndexName = dateFormatter.parseMillis("2019.09.24"); + assertThat("indexName-yyyy.MM.dd-yyyy.MM.dd-digits is a valid index format and the second date should be parsed", + parsedDate, is(secondDateInIndexName)); + } + } + + public void testParseIndexNameThrowsIllegalArgumentExceptionForInvalidIndexFormat() { + expectThrows( + IllegalArgumentException.class, + "plainIndexName does not match the expected pattern", + () -> parseIndexNameAndExtractDate("plainIndexName") + ); + + expectThrows( + IllegalArgumentException.class, + "indexName--00001 does not match the expected pattern as the origination date is missing", + () -> parseIndexNameAndExtractDate("indexName--00001") + ); + + expectThrows( + IllegalArgumentException.class, + "indexName-00001 does not match the expected pattern as the origination date is missing", + () -> parseIndexNameAndExtractDate("indexName-00001") + ); + + expectThrows( + IllegalArgumentException.class, + "indexName_2019.09.04_00001 does not match the expected pattern as _ is not the expected delimiter", + () -> parseIndexNameAndExtractDate("indexName_2019.09.04_00001") + ); + } + + public void testParseIndexNameThrowsIllegalArgumentExceptionForInvalidDateFormat() { + expectThrows( + IllegalArgumentException.class, + "indexName-2019.04-00001 does not match the expected pattern as the date does not conform with the yyyy.MM.dd pattern", + () -> parseIndexNameAndExtractDate("indexName-2019.04-00001") + ); + + expectThrows( + IllegalArgumentException.class, + "java.lang.IllegalArgumentException: failed to parse date field [2019.09.44] with format [yyyy.MM.dd]", + () -> parseIndexNameAndExtractDate("index-2019.09.44") + ); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java index 423ae1297a5a..487bf5a7a434 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java @@ -331,9 +331,10 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase { assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); - assertTrue(awaitBusy(() -> indexer.getPosition() == 2)); + assertBusy(() -> assertThat(indexer.getPosition(), equalTo(2))); + countDownLatch.countDown(); - assertTrue(awaitBusy(() -> isFinished.get())); + assertBusy(() -> assertTrue(isFinished.get())); assertThat(indexer.getPosition(), equalTo(3)); assertFalse(isStopped.get()); @@ -347,24 +348,24 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase { } } - public void testStateMachineBrokenSearch() throws InterruptedException { + public void testStateMachineBrokenSearch() throws Exception { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); final ExecutorService executor = Executors.newFixedThreadPool(1); - try { + try { MockIndexerThrowsFirstSearch indexer = new MockIndexerThrowsFirstSearch(executor, state, 2); indexer.start(); + assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); - assertTrue(awaitBusy(() -> isFinished.get(), 10000, TimeUnit.SECONDS)); + 
assertBusy(() -> assertTrue(isFinished.get()), 10000, TimeUnit.SECONDS); assertThat(indexer.getStep(), equalTo(3)); - } finally { executor.shutdownNow(); } } - public void testStop_WhileIndexing() throws InterruptedException { + public void testStop_WhileIndexing() throws Exception { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); final ExecutorService executor = Executors.newFixedThreadPool(1); try { @@ -378,14 +379,14 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase { countDownLatch.countDown(); assertThat(indexer.getPosition(), equalTo(2)); - assertTrue(awaitBusy(() -> isStopped.get())); + assertBusy(() -> assertTrue(isStopped.get())); assertFalse(isFinished.get()); } finally { executor.shutdownNow(); } } - public void testFiveRuns() throws InterruptedException { + public void testFiveRuns() throws Exception { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); final ExecutorService executor = Executors.newFixedThreadPool(1); try { @@ -393,7 +394,7 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase { indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); - assertTrue(awaitBusy(() -> isFinished.get())); + assertBusy(() -> assertTrue(isFinished.get())); indexer.assertCounters(); } finally { executor.shutdownNow(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java index a22c499220ce..2516b2fea94a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/MeanSquaredErrorTests.java @@ -49,8 +49,9 @@ public class MeanSquaredErrorTests extends AbstractSerializingTestCase { )); RSquared rSquared = new RSquared(); - EvaluationMetricResult result = rSquared.evaluate(aggs); + rSquared.process(aggs); + EvaluationMetricResult result = rSquared.getResult().get(); String expected = "{\"value\":0.9348643947690524}"; assertThat(Strings.toString(result), equalTo(expected)); } @@ -67,35 +68,48 @@ public class RSquaredTests extends AbstractSerializingTestCase { )); RSquared rSquared = new RSquared(); - EvaluationMetricResult result = rSquared.evaluate(aggs); + rSquared.process(aggs); + + EvaluationMetricResult result = rSquared.getResult().get(); assertThat(result, equalTo(new RSquared.Result(0.0))); } public void testEvaluate_GivenMissingAggs() { - EvaluationMetricResult zeroResult = new RSquared.Result(0.0); Aggregations aggs = new Aggregations(Collections.singletonList( createSingleMetricAgg("some_other_single_metric_agg", 0.2377) )); RSquared rSquared = new RSquared(); - EvaluationMetricResult result = rSquared.evaluate(aggs); - assertThat(result, equalTo(zeroResult)); + rSquared.process(aggs); - aggs = new Aggregations(Arrays.asList( + EvaluationMetricResult result = rSquared.getResult().get(); + assertThat(result, equalTo(new RSquared.Result(0.0))); + } + + public void testEvaluate_GivenMissingExtendedStatsAgg() { + Aggregations aggs = new Aggregations(Arrays.asList( createSingleMetricAgg("some_other_single_metric_agg", 0.2377), createSingleMetricAgg("residual_sum_of_squares", 0.2377) )); - result = rSquared.evaluate(aggs); - assertThat(result, 
equalTo(zeroResult)); + RSquared rSquared = new RSquared(); + rSquared.process(aggs); - aggs = new Aggregations(Arrays.asList( + EvaluationMetricResult result = rSquared.getResult().get(); + assertThat(result, equalTo(new RSquared.Result(0.0))); + } + + public void testEvaluate_GivenMissingResidualSumOfSquaresAgg() { + Aggregations aggs = new Aggregations(Arrays.asList( createSingleMetricAgg("some_other_single_metric_agg", 0.2377), createExtendedStatsAgg("extended_stats_actual",100, 50) )); - result = rSquared.evaluate(aggs); - assertThat(result, equalTo(zeroResult)); + RSquared rSquared = new RSquared(); + rSquared.process(aggs); + + EvaluationMetricResult result = rSquared.getResult().get(); + assertThat(result, equalTo(new RSquared.Result(0.0))); } private static NumericMetricsAggregation.SingleValue createSingleMetricAgg(String name, double value) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionTests.java index 7f089ab18cd9..077998b66aed 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/regression/RegressionTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider; @@ -22,6 +23,7 @@ import java.util.Collections; import java.util.List; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; public class RegressionTests extends AbstractSerializingTestCase { @@ -43,13 +45,7 @@ public class RegressionTests extends AbstractSerializingTestCase { if (randomBoolean()) { metrics.add(RSquaredTests.createRandom()); } - return new Regression(randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomBoolean() ? - null : - metrics.isEmpty() ? - null : - metrics); + return new Regression(randomAlphaOfLength(10), randomAlphaOfLength(10), metrics.isEmpty() ? 
null : metrics); } @Override @@ -74,7 +70,6 @@ public class RegressionTests extends AbstractSerializingTestCase { } public void testBuildSearch() { - Regression evaluation = new Regression("act", "prob", Arrays.asList(new MeanSquaredError())); QueryBuilder userProvidedQuery = QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery("field_A", "some-value")) @@ -82,10 +77,15 @@ public class RegressionTests extends AbstractSerializingTestCase { QueryBuilder expectedSearchQuery = QueryBuilders.boolQuery() .filter(QueryBuilders.existsQuery("act")) - .filter(QueryBuilders.existsQuery("prob")) + .filter(QueryBuilders.existsQuery("pred")) .filter(QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery("field_A", "some-value")) .filter(QueryBuilders.termQuery("field_B", "some-other-value"))); - assertThat(evaluation.buildSearch(userProvidedQuery).query(), equalTo(expectedSearchQuery)); + + Regression evaluation = new Regression("act", "pred", Arrays.asList(new MeanSquaredError())); + + SearchSourceBuilder searchSourceBuilder = evaluation.buildSearch(userProvidedQuery); + assertThat(searchSourceBuilder.query(), equalTo(expectedSearchQuery)); + assertThat(searchSourceBuilder.aggregations().count(), greaterThan(0)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassificationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassificationTests.java index 6a589c0d055c..e63e88f6f848 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassificationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/softclassification/BinarySoftClassificationTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider; @@ -22,6 +23,7 @@ import java.util.Collections; import java.util.List; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; public class BinarySoftClassificationTests extends AbstractSerializingTestCase { @@ -81,7 +83,6 @@ public class BinarySoftClassificationTests extends AbstractSerializingTestCase { + + static class NamedObjectContainer implements ToXContentObject { + + static ParseField PRE_PROCESSORS = new ParseField("pre_processors"); + static ParseField TRAINED_MODEL = new ParseField("trained_model"); + + static final ObjectParser STRICT_PARSER = createParser(false); + static final ObjectParser LENIENT_PARSER = createParser(true); + + @SuppressWarnings("unchecked") + private static ObjectParser createParser(boolean lenient) { + ObjectParser parser = new ObjectParser<>( + "named_xcontent_object_container_test", + lenient, + NamedObjectContainer::new); + parser.declareNamedObjects(NamedObjectContainer::setPreProcessors, + (p, c, n) -> + lenient ? 
p.namedObject(LenientlyParsedPreProcessor.class, n, null) : + p.namedObject(StrictlyParsedPreProcessor.class, n, null), + (noc) -> noc.setUseExplicitPreprocessorOrder(true), PRE_PROCESSORS); + parser.declareNamedObjects(NamedObjectContainer::setTrainedModel, + (p, c, n) -> + lenient ? p.namedObject(LenientlyParsedTrainedModel.class, n, null) : + p.namedObject(StrictlyParsedTrainedModel.class, n, null), + TRAINED_MODEL); + return parser; + } + + private boolean useExplicitPreprocessorOrder = false; + private List preProcessors; + private TrainedModel trainedModel; + + void setPreProcessors(List preProcessors) { + this.preProcessors = preProcessors; + } + + void setTrainedModel(List trainedModel) { + this.trainedModel = trainedModel.get(0); + } + + void setModel(TrainedModel trainedModel) { + this.trainedModel = trainedModel; + } + + void setUseExplicitPreprocessorOrder(boolean value) { + this.useExplicitPreprocessorOrder = value; + } + + static NamedObjectContainer fromXContent(XContentParser parser, boolean lenient) { + return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + writeNamedObjects(builder, params, useExplicitPreprocessorOrder, PRE_PROCESSORS.getPreferredName(), preProcessors); + writeNamedObjects(builder, params, false, TRAINED_MODEL.getPreferredName(), Collections.singletonList(trainedModel)); + builder.endObject(); + return builder; + } + + XContentBuilder writeNamedObjects(XContentBuilder builder, + Params params, + boolean useExplicitOrder, + String namedObjectsName, + List namedObjects) throws IOException { + if (useExplicitOrder) { + builder.startArray(namedObjectsName); + } else { + builder.startObject(namedObjectsName); + } + for (NamedXContentObject object : namedObjects) { + if (useExplicitOrder) { + builder.startObject(); + } + builder.field(object.getName(), object, params); + if (useExplicitOrder) { + builder.endObject(); + } + } + if (useExplicitOrder) { + builder.endArray(); + } else { + builder.endObject(); + } + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NamedObjectContainer that = (NamedObjectContainer) o; + return Objects.equals(preProcessors, that.preProcessors) && Objects.equals(trainedModel, that.trainedModel); + } + + @Override + public int hashCode() { + return Objects.hash(preProcessors); + } + } + + private boolean lenient; + + @Before + public void chooseStrictOrLenient() { + lenient = randomBoolean(); + } + + @Override + public NamedObjectContainer createTestInstance() { + int max = randomIntBetween(1, 10); + List preProcessors = new ArrayList<>(max); + for (int i = 0; i < max; i++) { + preProcessors.add(randomFrom(FrequencyEncodingTests.createRandom(), + OneHotEncodingTests.createRandom(), + TargetMeanEncodingTests.createRandom())); + } + NamedObjectContainer container = new NamedObjectContainer(); + container.setPreProcessors(preProcessors); + container.setUseExplicitPreprocessorOrder(true); + container.setModel(TreeTests.buildRandomTree(5, 4)); + return container; + } + + @Override + protected NamedObjectContainer doParseInstance(XContentParser parser) throws IOException { + return NamedObjectContainer.fromXContent(parser, lenient); + } + + @Override + protected boolean supportsUnknownFields() { + return lenient; + } + + @Override + protected Predicate 
getRandomFieldsExcludeFilter() { + // We only want to add random fields to the root, or the root of the named objects + return field -> + (field.endsWith("frequency_encoding") || + field.endsWith("one_hot_encoding") || + field.endsWith("target_mean_encoding") || + field.startsWith("tree.tree_structure") || + field.isEmpty()) == false; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + List namedXContent = new ArrayList<>(); + namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); + namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); + return new NamedXContentRegistry(namedXContent); + } +} + diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncodingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncodingTests.java new file mode 100644 index 000000000000..72047178e9f5 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/FrequencyEncodingTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.hamcrest.Matcher; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; + +public class FrequencyEncodingTests extends PreProcessingTests { + + @Override + protected FrequencyEncoding doParseInstance(XContentParser parser) throws IOException { + return lenient ? 
FrequencyEncoding.fromXContentLenient(parser) : FrequencyEncoding.fromXContentStrict(parser); + } + + @Override + protected FrequencyEncoding createTestInstance() { + return createRandom(); + } + + public static FrequencyEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map<String, Double> valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false)); + } + return new FrequencyEncoding(randomAlphaOfLength(10), randomAlphaOfLength(10), valueMap); + } + + @Override + protected Writeable.Reader<FrequencyEncoding> instanceReader() { + return FrequencyEncoding::new; + } + + public void testProcessWithFieldPresent() { + String field = "categorical"; + List<String> values = Arrays.asList("foo", "bar", "foobar", "baz", "farequote"); + Map<String, Double> valueMap = values.stream().collect(Collectors.toMap(Function.identity(), + v -> randomDoubleBetween(0.0, 1.0, false))); + String encodedFeatureName = "encoded"; + FrequencyEncoding encoding = new FrequencyEncoding(field, encodedFeatureName, valueMap); + String fieldValue = randomFrom(values); + Map<String, Matcher<? super Object>> matchers = Collections.singletonMap(encodedFeatureName, equalTo(valueMap.get(fieldValue))); + Map<String, Object> fieldValues = randomFieldValues(field, fieldValue); + testProcess(encoding, fieldValues, matchers); + + // Test where the value is some unknown value + fieldValues = randomFieldValues(field, "unknownValue"); + matchers = Collections.singletonMap(encodedFeatureName, equalTo(0.0)); + testProcess(encoding, fieldValues, matchers); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncodingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncodingTests.java new file mode 100644 index 000000000000..f0627719ec47 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/OneHotEncodingTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.hamcrest.Matcher; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; + +public class OneHotEncodingTests extends PreProcessingTests<OneHotEncoding> { + + @Override + protected OneHotEncoding doParseInstance(XContentParser parser) throws IOException { + return lenient ?
OneHotEncoding.fromXContentLenient(parser) : OneHotEncoding.fromXContentStrict(parser); + } + + @Override + protected OneHotEncoding createTestInstance() { + return createRandom(); + } + + public static OneHotEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); + } + return new OneHotEncoding(randomAlphaOfLength(10), valueMap); + } + + @Override + protected Writeable.Reader instanceReader() { + return OneHotEncoding::new; + } + + public void testProcessWithFieldPresent() { + String field = "categorical"; + List values = Arrays.asList("foo", "bar", "foobar", "baz", "farequote"); + Map valueMap = values.stream().collect(Collectors.toMap(Function.identity(), v -> "Column_" + v)); + OneHotEncoding encoding = new OneHotEncoding(field, valueMap); + String fieldValue = randomFrom(values); + Map fieldValues = randomFieldValues(field, fieldValue); + + Map> matchers = values.stream().map(v -> "Column_" + v) + .collect(Collectors.toMap( + Function.identity(), + v -> v.equals("Column_" + fieldValue) ? equalTo(1) : equalTo(0))); + + fieldValues.put(field, fieldValue); + testProcess(encoding, fieldValues, matchers); + + // Test where the value is some unknown Value + fieldValues = randomFieldValues(field, "unknownValue"); + matchers.put("Column_" + fieldValue, equalTo(0)); + testProcess(encoding, fieldValues, matchers); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessingTests.java new file mode 100644 index 000000000000..4301b09c5ece --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/PreProcessingTests.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.hamcrest.Matcher; +import org.junit.Before; + +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.equalTo; + +public abstract class PreProcessingTests<T extends PreProcessor> extends AbstractSerializingTestCase<T> { + + protected boolean lenient; + + @Before + public void chooseStrictOrLenient() { + lenient = randomBoolean(); + } + + @Override + protected boolean supportsUnknownFields() { + return lenient; + } + + @Override + protected Predicate<String> getRandomFieldsExcludeFilter() { + return field -> !field.isEmpty(); + } + + void testProcess(PreProcessor preProcessor, Map<String, Object> fieldValues, Map<String, Matcher<? super Object>> assertions) { + preProcessor.process(fieldValues); + assertions.forEach((fieldName, matcher) -> + assertThat(fieldValues.get(fieldName), matcher) + ); + } + + public void testWithMissingField() { + Map<String, Object> fields = randomFieldValues(); + PreProcessor preProcessor = this.createTestInstance(); + Map<String, Object> fieldsCopy = new HashMap<>(fields); + preProcessor.process(fields); + assertThat(fieldsCopy, equalTo(fields)); + } + + Map<String, Object> randomFieldValues() { + int numFields = randomIntBetween(1, 5); + Map<String, Object> fieldValues = new HashMap<>(numFields); + for (int k = 0; k < numFields; k++) { + fieldValues.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); + } + return fieldValues; + } + + Map<String, Object> randomFieldValues(String categoricalField, String categoricalValue) { + Map<String, Object> fieldValues = randomFieldValues(); + fieldValues.put(categoricalField, categoricalValue); + return fieldValues; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncodingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncodingTests.java new file mode 100644 index 000000000000..d86d9e09f023 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/TargetMeanEncodingTests.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.preprocessing; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.hamcrest.Matcher; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; + +public class TargetMeanEncodingTests extends PreProcessingTests<TargetMeanEncoding> { + + @Override + protected TargetMeanEncoding doParseInstance(XContentParser parser) throws IOException { + return lenient ?
TargetMeanEncoding.fromXContentLenient(parser) : TargetMeanEncoding.fromXContentStrict(parser); + } + + @Override + protected TargetMeanEncoding createTestInstance() { + return createRandom(); + } + + public static TargetMeanEncoding createRandom() { + int valuesSize = randomIntBetween(1, 10); + Map<String, Double> valueMap = new HashMap<>(); + for (int i = 0; i < valuesSize; i++) { + valueMap.put(randomAlphaOfLength(10), randomDoubleBetween(0.0, 1.0, false)); + } + return new TargetMeanEncoding(randomAlphaOfLength(10), + randomAlphaOfLength(10), + valueMap, + randomDoubleBetween(0.0, 1.0, false)); + } + + @Override + protected Writeable.Reader<TargetMeanEncoding> instanceReader() { + return TargetMeanEncoding::new; + } + + public void testProcessWithFieldPresent() { + String field = "categorical"; + List<String> values = Arrays.asList("foo", "bar", "foobar", "baz", "farequote"); + Map<String, Double> valueMap = values.stream().collect(Collectors.toMap(Function.identity(), + v -> randomDoubleBetween(0.0, 1.0, false))); + String encodedFeatureName = "encoded"; + Double defaultValue = randomDouble(); + TargetMeanEncoding encoding = new TargetMeanEncoding(field, encodedFeatureName, valueMap, defaultValue); + String fieldValue = randomFrom(values); + Map<String, Matcher<? super Object>> matchers = Collections.singletonMap(encodedFeatureName, equalTo(valueMap.get(fieldValue))); + Map<String, Object> fieldValues = randomFieldValues(field, fieldValue); + testProcess(encoding, fieldValues, matchers); + + // Test where the value is some unknown value + fieldValues = randomFieldValues(field, "unknownValue"); + matchers = Collections.singletonMap(encodedFeatureName, equalTo(defaultValue)); + testProcess(encoding, fieldValues, matchers); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNodeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNodeTests.java new file mode 100644 index 000000000000..dd87270b95fc --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeNodeTests.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.ml.job.config.Operator; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class TreeNodeTests extends AbstractSerializingTestCase<TreeNode> { + + private boolean lenient; + + @Before + public void chooseStrictOrLenient() { + lenient = randomBoolean(); + } + + @Override + protected TreeNode doParseInstance(XContentParser parser) throws IOException { + return TreeNode.fromXContent(parser, lenient).build(); + } + + @Override + protected boolean supportsUnknownFields() { + return lenient; + } + + @Override + protected TreeNode createTestInstance() { + Integer lft = randomBoolean() ? null : randomInt(100); + Integer rgt = randomBoolean() ? randomInt(100) : null; + Double threshold = lft != null || randomBoolean() ?
randomDouble() : null; + Integer featureIndex = lft != null || randomBoolean() ? randomInt(100) : null; + return createRandom(randomInt(100), + lft, + rgt, + threshold, + featureIndex, + randomBoolean() ? null : randomFrom(Operator.values())).build(); + } + + public static TreeNode createRandomLeafNode(double internalValue) { + return TreeNode.builder(randomInt(100)) + .setDefaultLeft(randomBoolean() ? null : randomBoolean()) + .setLeafValue(internalValue) + .build(); + } + + public static TreeNode.Builder createRandom(int nodeId, + Integer left, + Integer right, + Double threshold, + Integer featureIndex, + Operator operator) { + return TreeNode.builder(nodeId) + .setLeafValue(left == null ? randomDouble() : null) + .setDefaultLeft(randomBoolean() ? null : randomBoolean()) + .setLeftChild(left) + .setRightChild(right) + .setThreshold(threshold) + .setOperator(operator) + .setSplitGain(randomBoolean() ? null : randomDouble()) + .setSplitFeature(featureIndex); + } + + @Override + protected Writeable.Reader<TreeNode> instanceReader() { + return TreeNode::new; + } + + public void testCompare() { + expectThrows(IllegalArgumentException.class, + () -> createRandomLeafNode(randomDouble()).compare(Collections.singletonList(randomDouble()))); + + List<Double> featureValues = Arrays.asList(0.1, null); + assertThat(createRandom(0, 2, 3, 0.0, 0, null).build().compare(featureValues), + equalTo(3)); + assertThat(createRandom(0, 2, 3, 0.0, 0, Operator.GT).build().compare(featureValues), + equalTo(2)); + assertThat(createRandom(0, 2, 3, 0.2, 0, null).build().compare(featureValues), + equalTo(2)); + assertThat(createRandom(0, 2, 3, 0.0, 1, null).setDefaultLeft(true).build().compare(featureValues), + equalTo(2)); + assertThat(createRandom(0, 2, 3, 0.0, 1, null).setDefaultLeft(false).build().compare(featureValues), + equalTo(3)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeTests.java new file mode 100644 index 000000000000..391f2e4b7e59 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/tree/TreeTests.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.inference.trainedmodel.tree; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.equalTo; + + +public class TreeTests extends AbstractSerializingTestCase<Tree> { + + private boolean lenient; + + @Before + public void chooseStrictOrLenient() { + lenient = randomBoolean(); + } + + @Override + protected Tree doParseInstance(XContentParser parser) throws IOException { + return lenient ?
Tree.fromXContentLenient(parser) : Tree.fromXContentStrict(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return lenient; + } + + @Override + protected Predicate<String> getRandomFieldsExcludeFilter() { + return field -> field.startsWith("feature_names"); + } + + + @Override + protected Tree createTestInstance() { + return createRandom(); + } + + public static Tree createRandom() { + return buildRandomTree(randomIntBetween(2, 15), 6); + } + + public static Tree buildRandomTree(int numFeatures, int depth) { + + Tree.Builder builder = Tree.builder(); + List<String> featureNames = new ArrayList<>(numFeatures); + for (int i = 0; i < numFeatures; i++) { + featureNames.add(randomAlphaOfLength(10)); + } + builder.setFeatureNames(featureNames); + + TreeNode.Builder node = builder.addJunction(0, randomInt(numFeatures), true, randomDouble()); + List<Integer> childNodes = List.of(node.getLeftChild(), node.getRightChild()); + + for (int i = 0; i < depth - 1; i++) { + + List<Integer> nextNodes = new ArrayList<>(); + for (int nodeId : childNodes) { + if (i == depth - 2) { + builder.addLeaf(nodeId, randomDouble()); + } else { + TreeNode.Builder childNode = + builder.addJunction(nodeId, randomInt(numFeatures), true, randomDouble()); + nextNodes.add(childNode.getLeftChild()); + nextNodes.add(childNode.getRightChild()); + } + } + childNodes = nextNodes; + } + + return builder.build(); + } + + @Override + protected Writeable.Reader<Tree> instanceReader() { + return Tree::new; + } + + public void testInfer() { + // Build a tree with 2 nodes and 3 leaves using 2 features + // The leaves have unique values 0.1, 0.2, 0.3 + Tree.Builder builder = Tree.builder(); + TreeNode.Builder rootNode = builder.addJunction(0, 0, true, 0.5); + builder.addLeaf(rootNode.getRightChild(), 0.3); + TreeNode.Builder leftChildNode = builder.addJunction(rootNode.getLeftChild(), 1, true, 0.8); + builder.addLeaf(leftChildNode.getLeftChild(), 0.1); + builder.addLeaf(leftChildNode.getRightChild(), 0.2); + + List<String> featureNames = Arrays.asList("foo", "bar"); + Tree tree = builder.setFeatureNames(featureNames).build(); + + // This feature vector should hit the right child of the root node + List<Double> featureVector = Arrays.asList(0.6, 0.0); + Map<String, Object> featureMap = zipObjMap(featureNames, featureVector); + assertEquals(0.3, tree.infer(featureMap), 0.00001); + + // This should hit the left child of the left child of the root node + // i.e. it takes the path left, left + featureVector = Arrays.asList(0.3, 0.7); + featureMap = zipObjMap(featureNames, featureVector); + assertEquals(0.1, tree.infer(featureMap), 0.00001); + + // This should hit the right child of the left child of the root node + // i.e.
it takes the path left, right + featureVector = Arrays.asList(0.3, 0.9); + featureMap = zipObjMap(featureNames, featureVector); + assertEquals(0.2, tree.infer(featureMap), 0.00001); + } + + public void testTreeWithNullRoot() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> Tree.builder().setNodes(Collections.singletonList(null)) + .build()); + assertThat(ex.getMessage(), equalTo("[tree] must have non-null root node.")); + } + + public void testTreeWithInvalidNode() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> Tree.builder().setNodes(TreeNode.builder(0) + .setLeftChild(1) + .setSplitFeature(1) + .setThreshold(randomDouble())) + .build()); + assertThat(ex.getMessage(), equalTo("[tree] contains null or missing nodes [1]")); + } + + public void testTreeWithNullNode() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> Tree.builder().setNodes(TreeNode.builder(0) + .setLeftChild(1) + .setSplitFeature(1) + .setThreshold(randomDouble()), + null) + .build()); + assertThat(ex.getMessage(), equalTo("[tree] contains null or missing nodes [1]")); + } + + public void testTreeWithCycle() { + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> Tree.builder().setNodes(TreeNode.builder(0) + .setLeftChild(1) + .setSplitFeature(1) + .setThreshold(randomDouble()), + TreeNode.builder(0) + .setLeftChild(0) + .setSplitFeature(1) + .setThreshold(randomDouble())) + .build()); + assertThat(ex.getMessage(), equalTo("[tree] contains cycle at node 0")); + } + + private static Map zipObjMap(List keys, List values) { + return IntStream.range(0, keys.size()).boxed().collect(Collectors.toMap(keys::get, values::get)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java index f53f4c967dcf..b057547f2a3e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java @@ -38,6 +38,42 @@ import static org.hamcrest.Matchers.nullValue; public class PutRoleRequestTests extends ESTestCase { + public void testValidationErrorWithUnknownClusterPrivilegeName() { + final PutRoleRequest request = new PutRoleRequest(); + request.name(randomAlphaOfLengthBetween(4, 9)); + String unknownClusterPrivilegeName = "unknown_" + randomAlphaOfLengthBetween(3,9); + request.cluster("manage_security", unknownClusterPrivilegeName); + + // Fail + assertValidationError("unknown cluster privilege [" + unknownClusterPrivilegeName.toLowerCase(Locale.ROOT) + "]", request); + } + + public void testValidationSuccessWithCorrectClusterPrivilegeName() { + final PutRoleRequest request = new PutRoleRequest(); + request.name(randomAlphaOfLengthBetween(4, 9)); + request.cluster("manage_security", "manage", "cluster:admin/xpack/security/*"); + assertSuccessfulValidation(request); + } + + public void testValidationErrorWithUnknownIndexPrivilegeName() { + final PutRoleRequest request = new PutRoleRequest(); + request.name(randomAlphaOfLengthBetween(4, 9)); + String unknownIndexPrivilegeName = "unknown_" + randomAlphaOfLengthBetween(3,9); + request.addIndex(new String[]{randomAlphaOfLength(5)}, new String[]{"index", unknownIndexPrivilegeName}, null, + null, null, 
randomBoolean()); + + // Fail + assertValidationError("unknown index privilege [" + unknownIndexPrivilegeName.toLowerCase(Locale.ROOT) + "]", request); + } + + public void testValidationSuccessWithCorrectIndexPrivilegeName() { + final PutRoleRequest request = new PutRoleRequest(); + request.name(randomAlphaOfLengthBetween(4, 9)); + request.addIndex(new String[]{randomAlphaOfLength(5)}, new String[]{"index", "write", "indices:data/read"}, null, + null, null, randomBoolean()); + assertSuccessfulValidation(request); + } + public void testValidationOfApplicationPrivileges() { assertSuccessfulValidation(buildRequestWithApplicationPrivilege("app", new String[]{"read"}, new String[]{"*"})); assertSuccessfulValidation(buildRequestWithApplicationPrivilege("app", new String[]{"action:login"}, new String[]{"/"})); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index ca49e4ae4a3e..ca2b38318a06 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -28,12 +28,10 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.license.XPackLicenseState; @@ -55,9 +53,7 @@ import static java.util.Collections.singleton; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; @@ -76,9 +72,6 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT final Authentication authentication = mock(Authentication.class); when(authentication.getUser()).thenReturn(mock(User.class)); threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); - IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new - FieldPermissions(), - DocumentPermissions.filteredBy(singleton(new BytesArray("{\"match_all\" : {}}")))); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY); Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); @@ -90,14 +83,6 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY); 
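The four role-validation tests above all drive the same code path through request validation. A minimal sketch of the behaviour they assert (the role and privilege names here are made up, and the standard ActionRequest#validate() contract is assumed):

    import org.elasticsearch.action.ActionRequestValidationException;
    import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest;

    public class PutRoleValidationSketch {
        // Mirrors testValidationErrorWithUnknownClusterPrivilegeName above.
        static void sketch() {
            PutRoleRequest request = new PutRoleRequest();
            request.name("example_role");                        // made-up role name
            request.cluster("manage_security", "unknown_thing"); // one known, one unknown privilege
            ActionRequestValidationException e = request.validate();
            // e is non-null; its errors include: unknown cluster privilege [unknown_thing]
        }
    }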
XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); - SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext, - bitsetCache, threadContext, licenseState, scriptService) { - - @Override - protected IndicesAccessControl getIndicesAccessControl() { - return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl)); - } - }; Directory directory = newDirectory(); IndexWriter iw = new IndexWriter( @@ -144,17 +129,32 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); for (int i = 0; i < numValues; i++) { + String termQuery = "{\"term\": {\"field\": \""+ values[i] + "\"} }"; + IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(true, new + FieldPermissions(), + DocumentPermissions.filteredBy(singleton(new BytesArray(termQuery)))); + SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext, + bitsetCache, threadContext, licenseState, scriptService) { + + @Override + protected IndicesAccessControl getIndicesAccessControl() { + return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl)); + } + }; + ParsedQuery parsedQuery = new ParsedQuery(new TermQuery(new Term("field", values[i]))); - doReturn(new TermQueryBuilder("field", values[i])).when(queryShardContext).parseInnerQueryBuilder(any(XContentParser.class)); when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery); + DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader); IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader, IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()); int expectedHitCount = valuesHitCount[i]; logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount); + TotalHitCountCollector countCollector = new TotalHitCountCollector(); indexSearcher.search(new MatchAllDocsQuery(), countCollector); + assertThat(countCollector.getTotalHits(), equalTo(expectedHitCount)); assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissionsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissionsTests.java index f8d1334df7e4..f47476979e30 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissionsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/DocumentPermissionsTests.java @@ -6,23 +6,14 @@ package org.elasticsearch.xpack.core.security.authz.permission; -import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.BoostingQueryBuilder; -import org.elasticsearch.index.query.ConstantScoreQueryBuilder; -import org.elasticsearch.index.query.GeoShapeQueryBuilder; 
-import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.TermsQueryBuilder; -import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.indices.TermsLookup; -import org.elasticsearch.join.query.HasChildQueryBuilder; -import org.elasticsearch.join.query.HasParentQueryBuilder; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -68,44 +59,6 @@ public class DocumentPermissionsTests extends ESTestCase { assertThat(ae.getMessage(), containsString("nested scoping for document permissions is not permitted")); } - public void testVerifyRoleQuery() throws Exception { - QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2"); - DocumentPermissions.verifyRoleQuery(queryBuilder1); - - QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_type", "_id", "_path")); - Exception e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder2)); - assertThat(e.getMessage(), equalTo("terms query with terms lookup isn't supported as part of a role query")); - - QueryBuilder queryBuilder3 = new GeoShapeQueryBuilder("field", "_id", "_type"); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder3)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); - - QueryBuilder queryBuilder4 = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder4)); - assertThat(e.getMessage(), equalTo("has_child query isn't support as part of a role query")); - - QueryBuilder queryBuilder5 = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder5)); - assertThat(e.getMessage(), equalTo("has_parent query isn't support as part of a role query")); - - QueryBuilder queryBuilder6 = new BoolQueryBuilder().must(new GeoShapeQueryBuilder("field", "_id", "_type")); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder6)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); - - QueryBuilder queryBuilder7 = new ConstantScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type")); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder7)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); - - QueryBuilder queryBuilder8 = new FunctionScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type")); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder8)); - assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't support as part of a role query")); - - QueryBuilder queryBuilder9 = new BoostingQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type"), - new MatchAllQueryBuilder()); - e = expectThrows(IllegalArgumentException.class, () -> DocumentPermissions.verifyRoleQuery(queryBuilder9)); - assertThat(e.getMessage(), equalTo("geoshape query 
referring to indexed shapes isn't support as part of a role query")); - } - public void testFailIfQueryUsesClient() throws Exception { Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); @@ -115,7 +68,7 @@ public class DocumentPermissionsTests extends ESTestCase { QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2"); DocumentPermissions.failIfQueryUsesClient(queryBuilder1, context); - QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_type", "_id", "_path")); + QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_id", "_path")); Exception e = expectThrows(IllegalStateException.class, () -> DocumentPermissions.failIfQueryUsesClient(queryBuilder2, context)); assertThat(e.getMessage(), equalTo("role queries are not allowed to execute additional requests")); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index f1e7f1a9bfe4..9c23def4283c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -133,12 +133,13 @@ import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchAction; @@ -1130,8 +1131,8 @@ public class ReservedRolesStoreTests extends ESTestCase { Role role = Role.builder(roleDescriptor, null).build(); assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformsAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformsStatsAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformStatsAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), 
is(true)); @@ -1140,9 +1141,9 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); - assertOnlyReadAllowed(role, ".data-frame-notifications-1"); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX); assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, ".data-frame-internal-1"); // internal use only + assertNoAccessAllowed(role, TransformInternalIndexConstants.LATEST_INDEX_NAME); // internal use only assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); @@ -1169,8 +1170,8 @@ public class ReservedRolesStoreTests extends ESTestCase { Role role = Role.builder(roleDescriptor, null).build(); assertThat(role.cluster().check(DeleteTransformAction.NAME, request, authentication), is(false)); - assertThat(role.cluster().check(GetTransformsAction.NAME, request, authentication), is(true)); - assertThat(role.cluster().check(GetTransformsStatsAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetTransformStatsAction.NAME, request, authentication), is(true)); assertThat(role.cluster().check(PreviewTransformAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(PutTransformAction.NAME, request, authentication), is(false)); assertThat(role.cluster().check(StartTransformAction.NAME, request, authentication), is(false)); @@ -1179,9 +1180,9 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(role.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); - assertOnlyReadAllowed(role, ".data-frame-notifications-1"); + assertOnlyReadAllowed(role, TransformInternalIndexConstants.AUDIT_INDEX); assertNoAccessAllowed(role, "foo"); - assertNoAccessAllowed(role, ".data-frame-internal-1"); + assertNoAccessAllowed(role, TransformInternalIndexConstants.LATEST_INDEX_NAME); assertNoAccessAllowed(role, RestrictedIndicesNames.RESTRICTED_NAMES); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidatorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidatorTests.java new file mode 100644 index 000000000000..b441c40340df --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/DLSRoleQueryValidatorTests.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.authz.support; + +import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.BoostingQueryBuilder; +import org.elasticsearch.index.query.ConstantScoreQueryBuilder; +import org.elasticsearch.index.query.GeoShapeQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.indices.TermsLookup; +import org.elasticsearch.join.query.HasChildQueryBuilder; +import org.elasticsearch.join.query.HasParentQueryBuilder; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class DLSRoleQueryValidatorTests extends ESTestCase { + + public void testVerifyRoleQuery() throws Exception { + QueryBuilder queryBuilder1 = new TermsQueryBuilder("field", "val1", "val2"); + DLSRoleQueryValidator.verifyRoleQuery(queryBuilder1); + + QueryBuilder queryBuilder2 = new TermsQueryBuilder("field", new TermsLookup("_index", "_id", "_path")); + Exception e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder2)); + assertThat(e.getMessage(), equalTo("terms query with terms lookup isn't supported as part of a role query")); + + QueryBuilder queryBuilder3 = new GeoShapeQueryBuilder("field", "_id", "_type"); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder3)); + assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query")); + + QueryBuilder queryBuilder4 = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder4)); + assertThat(e.getMessage(), equalTo("has_child query isn't supported as part of a role query")); + + QueryBuilder queryBuilder5 = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder5)); + assertThat(e.getMessage(), equalTo("has_parent query isn't supported as part of a role query")); + + QueryBuilder queryBuilder6 = new BoolQueryBuilder().must(new GeoShapeQueryBuilder("field", "_id", "_type")); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder6)); + assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query")); + + QueryBuilder queryBuilder7 = new ConstantScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type")); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder7)); + assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query")); + + QueryBuilder queryBuilder8 = new FunctionScoreQueryBuilder(new GeoShapeQueryBuilder("field", "_id", "_type")); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder8)); + assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query")); + + QueryBuilder queryBuilder9 = new BoostingQueryBuilder(new 
GeoShapeQueryBuilder("field", "_id", "_type"), + new MatchAllQueryBuilder()); + e = expectThrows(IllegalArgumentException.class, () -> DLSRoleQueryValidator.verifyRoleQuery(queryBuilder9)); + assertThat(e.getMessage(), equalTo("geoshape query referring to indexed shapes isn't supported as part of a role query")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionRequestTests.java similarity index 88% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionRequestTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionRequestTests.java index 53cb7d01cf26..4014ba1310b6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionRequestTests.java @@ -9,9 +9,9 @@ package org.elasticsearch.xpack.core.transform.action; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction.Request; -public class GetTransformsActionRequestTests extends AbstractWireSerializingTestCase { +public class GetTransformActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionResponseTests.java similarity index 96% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionResponseTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionResponseTests.java index 8c274e2822dd..c0a13ca4bad5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformActionResponseTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction.Response; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; import org.elasticsearch.xpack.core.watcher.watch.Payload.XContent; @@ -22,7 +22,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -public class GetTransformsActionResponseTests extends AbstractWireSerializingTransformTestCase { +public class GetTransformActionResponseTests extends AbstractWireSerializingTransformTestCase { public void testInvalidTransforms() throws IOException { 
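// Renamed from GetTransformsActionResponseTests; the test body is unchanged by this
// diff and exercises how a Response built from a list of TransformConfig objects
// reports invalid transform configurations.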
List transforms = new ArrayList<>(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionRequestTests.java similarity index 87% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionRequestTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionRequestTests.java index d685d28a5e05..c604c3a90ab1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionRequestTests.java @@ -9,9 +9,9 @@ package org.elasticsearch.xpack.core.transform.action; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Request; -public class GetTransformsStatsActionRequestTests extends AbstractWireSerializingTestCase { +public class GetTransformStatsActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { if (randomBoolean()) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionResponseTests.java similarity index 92% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionResponseTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionResponseTests.java index ebb0fbf078a1..df5142af058b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformsStatsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsActionResponseTests.java @@ -10,14 +10,14 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Response; import org.elasticsearch.xpack.core.transform.transforms.TransformStats; import org.elasticsearch.xpack.core.transform.transforms.TransformStatsTests; import java.util.ArrayList; import java.util.List; -public class GetTransformsStatsActionResponseTests extends AbstractWireSerializingTransformTestCase { +public class GetTransformStatsActionResponseTests extends AbstractWireSerializingTransformTestCase { @Override protected Response createTestInstance() { List stats = new ArrayList<>(); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index 65348b3a77c7..24f86612f27f 100644 --- 
a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -13,11 +13,8 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; -import java.util.ArrayList; -import java.util.List; import java.util.Map; import java.util.function.BiConsumer; -import java.util.function.Function; /** * Index-specific deprecation checks @@ -31,49 +28,6 @@ public class IndexDeprecationChecks { } } - /** - * iterates through the "properties" field of mappings and returns any predicates that match in the - * form of issue-strings. - * - * @param type the document type - * @param parentMap the mapping to read properties from - * @param predicate the predicate to check against for issues, issue is returned if predicate evaluates to true - * @return a list of issues found in fields - */ - @SuppressWarnings("unchecked") - private static List findInPropertiesRecursively(String type, Map parentMap, - Function, Boolean> predicate) { - List issues = new ArrayList<>(); - Map properties = (Map) parentMap.get("properties"); - if (properties == null) { - return issues; - } - for (Map.Entry entry : properties.entrySet()) { - Map valueMap = (Map) entry.getValue(); - if (predicate.apply(valueMap)) { - issues.add("[type: " + type + ", field: " + entry.getKey() + "]"); - } - - Map values = (Map) valueMap.get("fields"); - if (values != null) { - for (Map.Entry multifieldEntry : values.entrySet()) { - Map multifieldValueMap = (Map) multifieldEntry.getValue(); - if (predicate.apply(multifieldValueMap)) { - issues.add("[type: " + type + ", field: " + entry.getKey() + ", multifield: " + multifieldEntry.getKey() + "]"); - } - if (multifieldValueMap.containsKey("properties")) { - issues.addAll(findInPropertiesRecursively(type, multifieldValueMap, predicate)); - } - } - } - if (valueMap.containsKey("properties")) { - issues.addAll(findInPropertiesRecursively(type, valueMap, predicate)); - } - } - - return issues; - } - static DeprecationIssue oldIndicesCheck(IndexMetaData indexMetaData) { Version createdWith = indexMetaData.getCreationVersion(); if (createdWith.before(Version.V_7_0_0)) { diff --git a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java index a6cd886bf812..7c137599bb6f 100644 --- a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java +++ b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.index.engine; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteResponse; @@ -38,7 +39,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.ShardSearchLocalRequest; +import 
org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.frozen.action.FreezeIndexAction; import org.elasticsearch.xpack.frozen.FrozenIndices; @@ -136,7 +137,7 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { client().prepareTermVectors("index", "" + randomIntBetween(0, 9)).execute(ActionListener.wrap(latch::countDown)); break; case 3: - client().prepareExplain("index", "_doc", "" + randomIntBetween(0, 9)).setQuery(new MatchAllQueryBuilder()) + client().prepareExplain("index", "" + randomIntBetween(0, 9)).setQuery(new MatchAllQueryBuilder()) .execute(ActionListener.wrap(latch::countDown)); break; default: @@ -250,17 +251,17 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertFalse(indexService.getIndexSettings().isSearchThrottled()); SearchService searchService = getInstanceFromNode(SearchService.class); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); - assertTrue(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertTrue(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); searchRequest.source(sourceBuilder); sourceBuilder.query(QueryBuilders.rangeQuery("field").gte("2010-01-03||+2d").lte("2010-01-04||+2d/d")); - assertTrue(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertTrue(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); sourceBuilder.query(QueryBuilders.rangeQuery("field").gt("2010-01-06T02:00").lt("2010-01-07T02:00")); - assertFalse(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertFalse(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); } @@ -274,17 +275,17 @@ public class FrozenIndexTests extends ESSingleNodeTestCase { assertTrue(indexService.getIndexSettings().isSearchThrottled()); SearchService searchService = getInstanceFromNode(SearchService.class); SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); - assertTrue(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertTrue(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); sourceBuilder.query(QueryBuilders.rangeQuery("field").gte("2010-01-03||+2d").lte("2010-01-04||+2d/d")); searchRequest.source(sourceBuilder); - assertTrue(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertTrue(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); sourceBuilder.query(QueryBuilders.rangeQuery("field").gt("2010-01-06T02:00").lt("2010-01-07T02:00")); - assertFalse(searchService.canMatch(new ShardSearchLocalRequest(searchRequest, shard.shardId(), 1, + assertFalse(searchService.canMatch(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, 
shard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1f, -1, null, null))); IndicesStatsResponse response = client().admin().indices().prepareStats("index").clear().setRefresh(true).get(); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java index b4cad8395153..40331f168c2d 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/rest/action/RestGraphAction.java @@ -40,8 +40,6 @@ import static org.elasticsearch.xpack.core.graph.action.GraphExploreAction.INSTA public class RestGraphAction extends BaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGraphAction.class)); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + - " Specifying types in graph requests is deprecated."; public static final ParseField TIMEOUT_FIELD = new ParseField("timeout"); public static final ParseField SIGNIFICANCE_FIELD = new ParseField("use_significance"); @@ -65,20 +63,12 @@ public class RestGraphAction extends BaseRestHandler { public RestGraphAction(RestController controller) { // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( - GET, "/{index}/_graph/explore", this, - GET, "/{index}/_xpack/graph/_explore", deprecationLogger); + GET, "/{index}/_graph/explore", this, + GET, "/{index}/_xpack/graph/_explore", deprecationLogger); // TODO: remove deprecated endpoint in 8.0.0 controller.registerWithDeprecatedHandler( - POST, "/{index}/_graph/explore", this, - POST, "/{index}/_xpack/graph/_explore", deprecationLogger); - // TODO: remove deprecated endpoint in 8.0.0 - controller.registerWithDeprecatedHandler( - GET, "/{index}/{type}/_graph/explore", this, - GET, "/{index}/{type}/_xpack/graph/_explore", deprecationLogger); - // TODO: remove deprecated endpoint in 8.0.0 - controller.registerWithDeprecatedHandler( - POST, "/{index}/{type}/_graph/explore", this, - POST, "/{index}/{type}/_xpack/graph/_explore", deprecationLogger); + POST, "/{index}/_graph/explore", this, + POST, "/{index}/_xpack/graph/_explore", deprecationLogger); } @Override @@ -111,10 +101,6 @@ public class RestGraphAction extends BaseRestHandler { parseHop(parser, currentHop, graphRequest); } - if (request.hasParam("type")) { - deprecationLogger.deprecatedAndMaybeLog("graph_with_types", TYPES_DEPRECATION_MESSAGE); - graphRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); - } return channel -> client.execute(INSTANCE, graphRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java b/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java deleted file mode 100644 index fb91e6fc5ee8..000000000000 --- a/x-pack/plugin/graph/src/test/java/org/elasticsearch/xpack/graph/rest/action/RestGraphActionTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.graph.rest.action; - -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.test.rest.FakeRestRequest; -import org.elasticsearch.test.rest.RestActionTestCase; -import org.junit.Before; - -public class RestGraphActionTests extends RestActionTestCase { - - @Before - public void setUpAction() { - new RestGraphAction(controller()); - } - - public void testTypeInPath() { - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.GET) - .withPath("/some_index/some_type/_graph/explore") - .withContent(new BytesArray("{}"), XContentType.JSON) - .build(); - - dispatchRequest(request); - assertWarnings(RestGraphAction.TYPES_DEPRECATION_MESSAGE); - } - -} diff --git a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java index 43194823281d..1d82bc77b115 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleRestIT.java @@ -41,6 +41,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.slm.history.SnapshotHistoryItem.CREATE_OPERATION; @@ -134,7 +136,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { assertHistoryIsPresent(policyName, true, repoId, CREATE_OPERATION); Map stats = getSLMStats(); - Map policyStats = (Map) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName()); + Map policyStats = policyStatsAsMap(stats); Map policyIdStats = (Map) policyStats.get(policyName); int snapsTaken = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName()); int totalTaken = (int) stats.get(SnapshotLifecycleStats.TOTAL_TAKEN.getPreferredName()); @@ -183,7 +185,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { assertHistoryIsPresent(policyName, false, repoName, CREATE_OPERATION); Map stats = getSLMStats(); - Map policyStats = (Map) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName()); + Map policyStats = policyStatsAsMap(stats); Map policyIdStats = (Map) policyStats.get(policyName); int snapsFailed = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_FAILED.getPreferredName()); int totalFailed = (int) stats.get(SnapshotLifecycleStats.TOTAL_FAILED.getPreferredName()); @@ -208,7 +210,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { createSnapshotPolicy(policyName, "snap", "1 2 3 4 5 ?", repoId, indexName, true); ResponseException badResp = expectThrows(ResponseException.class, - () -> client().performRequest(new Request("PUT", "/_slm/policy/" + policyName + "-bad/_execute"))); + () -> client().performRequest(new Request("POST", "/_slm/policy/" + policyName + "-bad/_execute"))); assertThat(EntityUtils.toString(badResp.getResponse().getEntity()), containsString("no such snapshot lifecycle policy [" + policyName + "-bad]")); @@ -232,7 +234,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { } Map stats 
= getSLMStats(); - Map policyStats = (Map) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName()); + Map policyStats = policyStatsAsMap(stats); Map policyIdStats = (Map) policyStats.get(policyName); int snapsTaken = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName()); int totalTaken = (int) stats.get(SnapshotLifecycleStats.TOTAL_TAKEN.getPreferredName()); @@ -304,7 +306,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { assertHistoryIsPresent(policyName, true, repoId, DELETE_OPERATION); Map stats = getSLMStats(); - Map policyStats = (Map) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName()); + Map policyStats = policyStatsAsMap(stats); Map policyIdStats = (Map) policyStats.get(policyName); int snapsTaken = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName()); int snapsDeleted = (int) policyIdStats.get(SnapshotLifecycleStats.SnapshotPolicyStats.SNAPSHOTS_DELETED.getPreferredName()); @@ -336,7 +338,7 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { */ private String executePolicy(String policyId) { try { - Response executeRepsonse = client().performRequest(new Request("PUT", "/_slm/policy/" + policyId + "/_execute")); + Response executeRepsonse = client().performRequest(new Request("POST", "/_slm/policy/" + policyId + "/_execute")); try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, EntityUtils.toByteArray(executeRepsonse.getEntity()))) { return parser.mapStrings().get("snapshot_name"); @@ -488,4 +490,13 @@ public class SnapshotLifecycleRestIT extends ESRestTestCase { request.setJsonEntity(Strings.toString(document)); assertOK(client.performRequest(request)); } + + @SuppressWarnings("unchecked") + private static Map policyStatsAsMap(Map stats) { + return ((List>) stats.get(SnapshotLifecycleStats.POLICY_STATS.getPreferredName())) + .stream() + .collect(Collectors.toMap( + m -> (String) m.get(SnapshotLifecycleStats.SnapshotPolicyStats.POLICY_ID.getPreferredName()), + Function.identity())); + } } diff --git a/x-pack/plugin/ilm/qa/with-security/src/test/java/org/elasticsearch/xpack/security/PermissionsIT.java b/x-pack/plugin/ilm/qa/with-security/src/test/java/org/elasticsearch/xpack/security/PermissionsIT.java index b69584ddef86..3c3f11ff6b56 100644 --- a/x-pack/plugin/ilm/qa/with-security/src/test/java/org/elasticsearch/xpack/security/PermissionsIT.java +++ b/x-pack/plugin/ilm/qa/with-security/src/test/java/org/elasticsearch/xpack/security/PermissionsIT.java @@ -245,8 +245,7 @@ public class PermissionsIT extends ESRestTestCase { * which was rolled over by an ILM policy. 
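 * The rollover and the subsequent write to the new index are awaited with assertBusy,
 * so a failure surfaces as an assertion error rather than a silent timeout.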
*/ @TestIssueLogging(value = "org.elasticsearch:DEBUG", issueUrl = "https://github.com/elastic/elasticsearch/issues/41440") - public void testWhenUserLimitedByOnlyAliasOfIndexCanWriteToIndexWhichWasRolledoverByILMPolicy() - throws IOException, InterruptedException { + public void testWhenUserLimitedByOnlyAliasOfIndexCanWriteToIndexWhichWasRolledoverByILMPolicy() throws Exception { /* * Setup: * - ILM policy to rollover index when max docs condition is met @@ -265,33 +264,24 @@ public class PermissionsIT extends ESRestTestCase { refresh("foo_alias"); // wait so the ILM policy triggers rollover action, verify that the new index exists - assertThat(awaitBusy(() -> { + assertBusy(() -> { Request request = new Request("HEAD", "/" + "foo-logs-000002"); - int status; - try { - status = adminClient().performRequest(request).getStatusLine().getStatusCode(); - } catch (IOException e) { - throw new RuntimeException(e); - } - return status == 200; - }), is(true)); + int status = adminClient().performRequest(request).getStatusLine().getStatusCode(); + assertThat(status, equalTo(200)); + }); // test_user: index docs using alias, now should be able write to new index indexDocs("test_user", "x-pack-test-password", "foo_alias", 1); refresh("foo_alias"); // verify that the doc has been indexed into new write index - awaitBusy(() -> { + assertBusy(() -> { Request request = new Request("GET", "/foo-logs-000002/_search"); - Response response; - try { - response = adminClient().performRequest(request); - try (InputStream content = response.getEntity().getContent()) { - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); - return ((Integer) XContentMapValues.extractValue("hits.total.value", map)) == 1; - } - } catch (IOException e) { - throw new RuntimeException(e); + Response response = adminClient().performRequest(request); + try (InputStream content = response.getEntity().getContent()) { + Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); + Integer totalHits = (Integer) XContentMapValues.extractValue("hits.total.value", map); + assertThat(totalHits, equalTo(1)); } }); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index 2577e7192187..39e27f2a5afe 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; @@ -141,10 +142,12 @@ public class IndexLifecycle extends Plugin implements ActionPlugin { LifecycleSettings.LIFECYCLE_POLL_INTERVAL_SETTING, LifecycleSettings.LIFECYCLE_NAME_SETTING, LifecycleSettings.LIFECYCLE_ORIGINATION_DATE_SETTING, + LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE_SETTING, LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE_SETTING, RolloverAction.LIFECYCLE_ROLLOVER_ALIAS_SETTING, LifecycleSettings.SLM_HISTORY_INDEX_ENABLED_SETTING, - LifecycleSettings.SLM_RETENTION_SCHEDULE_SETTING); + LifecycleSettings.SLM_RETENTION_SCHEDULE_SETTING, + LifecycleSettings.SLM_RETENTION_DURATION_SETTING); } 
@Override @@ -268,6 +271,14 @@ public class IndexLifecycle extends Plugin implements ActionPlugin { return actions; } + @Override + public void onIndexModule(IndexModule indexModule) { + if (ilmEnabled) { + assert indexLifecycleInitialisationService.get() != null; + indexModule.addIndexEventListener(indexLifecycleInitialisationService.get()); + } + } + @Override public void close() { try { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java index 22f8bb746e82..1cf860bf5c7c 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java @@ -22,6 +22,8 @@ import org.elasticsearch.common.component.Lifecycle.State; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; @@ -39,11 +41,14 @@ import java.util.Collections; import java.util.Set; import java.util.function.LongSupplier; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.parseIndexNameAndExtractDate; +import static org.elasticsearch.xpack.core.ilm.IndexLifecycleOriginationDateParser.shouldParseIndexName; + /** * A service which runs the {@link LifecyclePolicy}s associated with indexes. */ public class IndexLifecycleService - implements ClusterStateListener, ClusterStateApplier, SchedulerEngine.Listener, Closeable, LocalNodeMasterListener { + implements ClusterStateListener, ClusterStateApplier, SchedulerEngine.Listener, Closeable, LocalNodeMasterListener, IndexEventListener { private static final Logger logger = LogManager.getLogger(IndexLifecycleService.class); private static final Set IGNORE_STEPS_MAINTENANCE_REQUESTED = Collections.singleton(ShrinkStep.NAME); private volatile boolean isMaster = false; @@ -148,6 +153,13 @@ public class IndexLifecycleService return ThreadPool.Names.MANAGEMENT; } + @Override + public void beforeIndexAddedToCluster(Index index, Settings indexSettings) { + if (shouldParseIndexName(indexSettings)) { + parseIndexNameAndExtractDate(index.getName()); + } + } + private void updatePollInterval(TimeValue newInterval) { this.pollInterval = newInterval; maybeScheduleJob(); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java index 993dac40252e..4c64437946ae 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTask.java @@ -23,6 +23,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.snapshots.SnapshotException; +import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import 
org.elasticsearch.xpack.core.slm.SnapshotInvocationRecord; @@ -91,16 +93,32 @@ public class SnapshotLifecycleTask implements SchedulerEngine.Listener { public void onResponse(CreateSnapshotResponse createSnapshotResponse) { logger.debug("snapshot response for [{}]: {}", policyMetadata.getPolicy().getId(), Strings.toString(createSnapshotResponse)); - final long timestamp = Instant.now().toEpochMilli(); - clusterService.submitStateUpdateTask("slm-record-success-" + policyMetadata.getPolicy().getId(), - WriteJobStatus.success(policyMetadata.getPolicy().getId(), request.snapshot(), timestamp)); - historyStore.putAsync(SnapshotHistoryItem.creationSuccessRecord(timestamp, policyMetadata.getPolicy(), - request.snapshot())); + final SnapshotInfo snapInfo = createSnapshotResponse.getSnapshotInfo(); + + // Check that there are no failed shards, since the request may not entirely + // fail, but may still have failures (such as in the case of an aborted snapshot) + if (snapInfo.failedShards() == 0) { + final long timestamp = Instant.now().toEpochMilli(); + clusterService.submitStateUpdateTask("slm-record-success-" + policyMetadata.getPolicy().getId(), + WriteJobStatus.success(policyMetadata.getPolicy().getId(), request.snapshot(), timestamp)); + historyStore.putAsync(SnapshotHistoryItem.creationSuccessRecord(timestamp, policyMetadata.getPolicy(), + request.snapshot())); + } else { + int failures = snapInfo.failedShards(); + int total = snapInfo.totalShards(); + final SnapshotException e = new SnapshotException(request.repository(), request.snapshot(), + "failed to create snapshot successfully, " + failures + " out of " + total + " total shards failed"); + // Add each failed shard's exception as suppressed, the exception contains + // information about which shard failed + snapInfo.shardFailures().forEach(failure -> e.addSuppressed(failure.getCause())); + // Call the failure handler to register this as a failure and persist it + onFailure(e); + } } @Override public void onFailure(Exception e) { - logger.error("failed to issue create snapshot request for snapshot lifecycle policy [{}]: {}", + logger.error("failed to create snapshot for snapshot lifecycle policy [{}]: {}", policyMetadata.getPolicy().getId(), e); final long timestamp = Instant.now().toEpochMilli(); clusterService.submitStateUpdateTask("slm-record-failure-" + policyMetadata.getPolicy().getId(), diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java index 27c40fbe15ca..d19707a2d8bd 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java @@ -324,6 +324,7 @@ public class SnapshotRetentionTask implements SchedulerEngine.Listener { List snapshots = entry.getValue(); for (SnapshotInfo info : snapshots) { final String policyId = getPolicyId(info); + final long deleteStartTime = nowNanoSupplier.getAsLong(); deleteSnapshot(policyId, repo, info.snapshotId(), slmStats, ActionListener.wrap(acknowledgedResponse -> { deleted.incrementAndGet(); if (acknowledgedResponse.isAcknowledged()) { @@ -349,13 +350,15 @@ public class SnapshotRetentionTask implements SchedulerEngine.Listener { })); // Check whether we have exceeded the maximum time allowed to spend deleting // snapshots, if we have, short-circuit the rest of the deletions - TimeValue elapsedDeletionTime = 
TimeValue.timeValueNanos(nowNanoSupplier.getAsLong() - startTime); - logger.debug("elapsed time for deletion of [{}] snapshot: {}", info.snapshotId(), elapsedDeletionTime); - if (elapsedDeletionTime.compareTo(maximumTime) > 0) { + long finishTime = nowNanoSupplier.getAsLong(); + TimeValue deletionTime = TimeValue.timeValueNanos(finishTime - deleteStartTime); + logger.debug("elapsed time for deletion of [{}] snapshot: {}", info.snapshotId(), deletionTime); + TimeValue totalDeletionTime = TimeValue.timeValueNanos(finishTime - startTime); + if (totalDeletionTime.compareTo(maximumTime) > 0) { logger.info("maximum snapshot retention deletion time reached, time spent: [{}]," + " maximum allowed time: [{}], deleted [{}] out of [{}] snapshots scheduled for deletion, failed to delete [{}]", - elapsedDeletionTime, maximumTime, deleted, count, failed); - slmStats.deletionTime(elapsedDeletionTime); + totalDeletionTime, maximumTime, deleted, count, failed); + slmStats.deletionTime(totalDeletionTime); slmStats.retentionTimedOut(); return; } @@ -387,8 +390,8 @@ public class SnapshotRetentionTask implements SchedulerEngine.Listener { } else { logger.warn("[{}] snapshot [{}] delete issued but the request was not acknowledged", repo, snapshot); } - listener.onResponse(acknowledgedResponse); slmStats.snapshotDeleted(slmPolicy); + listener.onResponse(acknowledgedResponse); } @Override diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java index a644df789d01..72eceee0f8ec 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/RestExecuteSnapshotLifecycleAction.java @@ -17,6 +17,7 @@ public class RestExecuteSnapshotLifecycleAction extends BaseRestHandler { public RestExecuteSnapshotLifecycleAction(RestController controller) { controller.registerHandler(RestRequest.Method.PUT, "/_slm/policy/{name}/_execute", this); + controller.registerHandler(RestRequest.Method.POST, "/_slm/policy/{name}/_execute", this); } @Override diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java index c000ee08eea1..91b350dfd851 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.Index; @@ -72,6 +73,7 @@ import static org.elasticsearch.xpack.core.ilm.LifecyclePolicyTestsUtils.newLock import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static 
org.hamcrest.core.CombinableMatcher.both; @@ -211,7 +213,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { { PhaseExecutionInfo expectedExecutionInfo = new PhaseExecutionInfo(lifecyclePolicy.getName(), mockPhase, 1L, actualModifiedDate); assertBusy(() -> { - IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(); + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); assertThat(indexResponse.getStep(), equalTo("observable_cluster_state_action")); assertThat(indexResponse.getPhaseExecutionInfo(), equalTo(expectedExecutionInfo)); originalLifecycleDate.set(indexResponse.getLifecycleDate()); @@ -224,7 +226,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { { assertBusy(() -> { - IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(); + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); assertThat("The configured origination date dictates the lifecycle date", indexResponse.getLifecycleDate(), equalTo(1000L)); }); @@ -236,7 +238,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { { assertBusy(() -> { - IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(); + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); assertThat("Without the origination date, the index create date should dictate the lifecycle date", indexResponse.getLifecycleDate(), equalTo(originalLifecycleDate.get())); }); @@ -249,7 +251,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { { PhaseExecutionInfo expectedExecutionInfo = new PhaseExecutionInfo(lifecyclePolicy.getName(), null, 1L, actualModifiedDate); assertBusy(() -> { - IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(); + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); assertThat(indexResponse.getPhase(), equalTo(TerminalPolicyStep.COMPLETED_PHASE)); assertThat(indexResponse.getStep(), equalTo(TerminalPolicyStep.KEY.getName())); assertThat(indexResponse.getPhaseExecutionInfo(), equalTo(expectedExecutionInfo)); @@ -257,11 +259,79 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { } } - private IndexLifecycleExplainResponse executeExplainRequestAndGetTestIndexResponse() throws ExecutionException, InterruptedException { + public void testExplainParseOriginationDate() throws Exception { + // start node + logger.info("Starting server1"); + internalCluster().startNode(); + logger.info("Starting server2"); + internalCluster().startNode(); + logger.info("Creating lifecycle [test_lifecycle]"); + PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); + PutLifecycleAction.Response putLifecycleResponse = client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get(); + assertAcked(putLifecycleResponse); + + GetLifecycleAction.Response getLifecycleResponse = client().execute(GetLifecycleAction.INSTANCE, + new GetLifecycleAction.Request()).get(); + assertThat(getLifecycleResponse.getPolicies().size(), equalTo(1)); + GetLifecycleAction.LifecyclePolicyResponseItem responseItem = getLifecycleResponse.getPolicies().get(0); + assertThat(responseItem.getLifecyclePolicy(), equalTo(lifecyclePolicy)); + assertThat(responseItem.getVersion(), equalTo(1L)); + long 
actualModifiedDate = Instant.parse(responseItem.getModifiedDate()).toEpochMilli(); + + String indexName = "test-2019.09.14"; + logger.info("Creating index [{}]", indexName); + CreateIndexResponse createIndexResponse = + client().admin().indices().create(createIndexRequest(indexName) + .settings(Settings.builder().put(settings).put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true)) + ).actionGet(); + assertAcked(createIndexResponse); + + DateFormatter dateFormatter = DateFormatter.forPattern("yyyy.MM.dd"); + long expectedDate = dateFormatter.parseMillis("2019.09.14"); + assertBusy(() -> { + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(indexName); + assertThat(indexResponse.getLifecycleDate(), is(expectedDate)); + }); + + // disabling lifecycle date parsing keeps the previously parsed value, as it was already set as the origination date + client().admin().indices().prepareUpdateSettings(indexName) + .setSettings(Collections.singletonMap(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, false)).get(); + + assertBusy(() -> { + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(indexName); + assertThat(indexResponse.getLifecycleDate(), is(expectedDate)); + }); + + // setting the lifecycle origination date setting to null should make the lifecycle date fall back to the index creation date + client().admin().indices().prepareUpdateSettings(indexName) + .setSettings(Collections.singletonMap(LifecycleSettings.LIFECYCLE_ORIGINATION_DATE, null)).get(); + + assertBusy(() -> { + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(indexName); + assertThat(indexResponse.getLifecycleDate(), is(greaterThan(expectedDate))); + }); + + // setting the lifecycle origination date to an explicit value overrides the date parsing + long originationDate = 42L; + client().admin().indices().prepareUpdateSettings(indexName) + .setSettings( + Map.of( + LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true, + LifecycleSettings.LIFECYCLE_ORIGINATION_DATE, originationDate) + ).get(); + + assertBusy(() -> { + IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse(indexName); + assertThat(indexResponse.getLifecycleDate(), is(originationDate)); + }); + } + + private IndexLifecycleExplainResponse executeExplainRequestAndGetTestIndexResponse(String indexName) throws ExecutionException, + InterruptedException { ExplainLifecycleRequest explainRequest = new ExplainLifecycleRequest(); ExplainLifecycleResponse explainResponse = client().execute(ExplainLifecycleAction.INSTANCE, explainRequest).get(); assertThat(explainResponse.getIndexResponses().size(), equalTo(1)); - return explainResponse.getIndexResponses().get("test"); + return explainResponse.getIndexResponses().get(indexName); } public void testMasterDedicatedDataDedicated() throws Exception { diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index 6776f13ea17c..87151627b02c 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -50,6 +50,7 @@ import java.time.ZoneId; import java.util.Collections; import java.util.SortedMap; import java.util.TreeMap; +import java.util.UUID; import java.util.concurrent.ExecutorService; 
import static org.elasticsearch.node.Node.NODE_MASTER_SETTING; @@ -305,4 +306,21 @@ public class IndexLifecycleServiceTests extends ESTestCase { indexLifecycleService.triggered(schedulerEvent); Mockito.verifyZeroInteractions(indicesClient, clusterService); } + + public void testParsingOriginationDateBeforeIndexCreation() { + Settings indexSettings = Settings.builder().put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true).build(); + Index index = new Index("invalid_index_name", UUID.randomUUID().toString()); + expectThrows(IllegalArgumentException.class, + "The parse origination date setting was configured for index " + index.getName() + + " but the index name did not match the expected format", + () -> indexLifecycleService.beforeIndexAddedToCluster(index, indexSettings) + ); + + // disabling the parse origination date setting should prevent the validation from throwing an exception + try { + indexLifecycleService.beforeIndexAddedToCluster(index, Settings.EMPTY); + } catch (Exception e) { + fail("Did not expect the before index validation to throw an exception as the parse origination date setting was not set"); + } + } } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index 692299cf0e7b..f5e463852f48 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -124,6 +124,7 @@ public class SLMSnapshotBlockingIntegTests extends ESIntegTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/46508") public void testRetentionWhileSnapshotInProgress() throws Exception { final String indexName = "test"; final String policyId = "slm-policy"; diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java index 84c1d12cce65..5474602cdfda 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java @@ -23,6 +23,10 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -47,6 +51,7 @@ import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; @@ -196,6 +201,83 @@ public class SnapshotLifecycleTaskTests extends ESTestCase { threadPool.shutdownNow(); } + public void testPartialFailureSnapshot() throws Exception { + final String id = randomAlphaOfLength(4); + final SnapshotLifecyclePolicyMetadata slpm = makePolicyMeta(id); + final 
SnapshotLifecycleMetadata meta = + new SnapshotLifecycleMetadata(Collections.singletonMap(id, slpm), OperationMode.RUNNING, new SnapshotLifecycleStats()); + + final ClusterState state = ClusterState.builder(new ClusterName("test")) + .metaData(MetaData.builder() + .putCustom(SnapshotLifecycleMetadata.TYPE, meta) + .build()) + .build(); + + final ThreadPool threadPool = new TestThreadPool("test"); + final AtomicBoolean clientCalled = new AtomicBoolean(false); + final SetOnce snapshotName = new SetOnce<>(); + try (ClusterService clusterService = ClusterServiceUtils.createClusterService(state, threadPool); + VerifyingClient client = new VerifyingClient(threadPool, + (action, request, listener) -> { + assertFalse(clientCalled.getAndSet(true)); + assertThat(action, instanceOf(CreateSnapshotAction.class)); + assertThat(request, instanceOf(CreateSnapshotRequest.class)); + + CreateSnapshotRequest req = (CreateSnapshotRequest) request; + + SnapshotLifecyclePolicy policy = slpm.getPolicy(); + assertThat(req.snapshot(), startsWith(policy.getName() + "-")); + assertThat(req.repository(), equalTo(policy.getRepository())); + snapshotName.set(req.snapshot()); + if (req.indices().length > 0) { + assertThat(Arrays.asList(req.indices()), equalTo(policy.getConfig().get("indices"))); + } + boolean globalState = policy.getConfig().get("include_global_state") == null || + Boolean.parseBoolean((String) policy.getConfig().get("include_global_state")); + assertThat(req.includeGlobalState(), equalTo(globalState)); + + return new CreateSnapshotResponse( + new SnapshotInfo( + new SnapshotId(req.snapshot(), "uuid"), + Arrays.asList(req.indices()), + randomNonNegativeLong(), + "snapshot started", + randomNonNegativeLong(), + 3, + Collections.singletonList( + new SnapshotShardFailure("nodeId", new ShardId("index", "uuid", 0), "forced failure")), + req.includeGlobalState(), + req.userMetadata() + )); + })) { + final AtomicBoolean historyStoreCalled = new AtomicBoolean(false); + SnapshotHistoryStore historyStore = new VerifyingHistoryStore(null, ZoneOffset.UTC, + item -> { + assertFalse(historyStoreCalled.getAndSet(true)); + final SnapshotLifecyclePolicy policy = slpm.getPolicy(); + assertEquals(policy.getId(), item.getPolicyId()); + assertEquals(policy.getRepository(), item.getRepository()); + assertEquals(policy.getConfig(), item.getSnapshotConfiguration()); + assertEquals(snapshotName.get(), item.getSnapshotName()); + assertFalse("item should be a failure", item.isSuccess()); + assertThat(item.getErrorDetails(), + containsString("failed to create snapshot successfully, 1 out of 3 total shards failed")); + assertThat(item.getErrorDetails(), + containsString("forced failure")); + }); + + SnapshotLifecycleTask task = new SnapshotLifecycleTask(client, clusterService, historyStore); + // Trigger the event with a matching job name for the policy + task.triggered(new SchedulerEngine.Event(SnapshotLifecycleService.getJobId(slpm), + System.currentTimeMillis(), System.currentTimeMillis())); + + assertTrue("snapshot should be triggered once", clientCalled.get()); + assertTrue("history store should be called once", historyStoreCalled.get()); + } + + threadPool.shutdownNow(); + } + /** * A client that delegates to a verifying function for action/request/listener */ diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index 7c7b2018ce7a..4c6c8b49fa63 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -55,8 +55,6 @@ testClusters.integTest { setting 'xpack.security.audit.enabled', 'false' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.ml.min_disk_space_off_heap', '200mb' - // TODO: remove when the cause of https://github.com/elastic/elasticsearch/issues/45652 is understood - setting 'logger.org.elasticsearch.rest.action.cat', 'DEBUG' keystore 'bootstrap.password', 'x-pack-test-password' keystore 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index adcf1aaf5f19..26d168b5267c 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -167,7 +167,8 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { client().admin().indices().prepareRefresh("*").get(); // We need to wait a second to ensure the second time around model snapshots will have a different ID (it depends on epoch seconds) - awaitBusy(() -> false, 1, TimeUnit.SECONDS); + // FIXME it would be better to wait for something concrete instead of waiting for time to elapse + assertBusy(() -> {}, 1, TimeUnit.SECONDS); for (Job.Builder job : getJobs()) { // Run up to now diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 114437a01c84..cfd760df9267 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -208,7 +208,9 @@ public class MlJobIT extends ESRestTestCase { } }); - String responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String responseAsString = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)))); @@ -272,7 +274,8 @@ public class MlJobIT extends ESRestTestCase { assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)))); assertThat(responseAsString, containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))); //job2 still exists - responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + responseAsString = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); client().performRequest(new Request("POST", "/_refresh")); @@ -287,7 +290,8 @@ public class MlJobIT extends ESRestTestCase { assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)))); client().performRequest(new Request("POST", "/_refresh")); - responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + responseAsString = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName))); } @@ -399,13 +403,16 @@ public class MlJobIT extends ESRestTestCase { String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String indicesBeforeDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesBeforeDelete, containsString(indexName)); client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)); // check that the index still exists (it's shared by default) - String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + String indicesAfterDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesAfterDelete, containsString(indexName)); waitUntilIndexIsEmpty(indexName); @@ -469,7 +476,9 @@ public class MlJobIT extends ESRestTestCase { String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String indicesBeforeDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesBeforeDelete, containsString(indexName)); Response response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId @@ -481,7 +490,8 @@ public class MlJobIT extends ESRestTestCase { assertThat(EntityUtils.toString(taskResponse.getEntity()), containsString("\"acknowledged\":true")); // check that the index still exists (it's shared by default) - String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + String indicesAfterDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesAfterDelete, containsString(indexName)); waitUntilIndexIsEmpty(indexName); @@ -516,7 +526,9 @@ public class MlJobIT extends ESRestTestCase { String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String indicesBeforeDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesBeforeDelete, containsString(indexName)); // Manually delete the index so that we can test that deletion proceeds @@ -526,7 +538,8 @@ public class MlJobIT extends ESRestTestCase { client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)); // check index was deleted - String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + String indicesAfterDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesAfterDelete, not(containsString(aliasName))); assertThat(indicesAfterDelete, not(containsString(indexName))); @@ -596,7 +609,9 @@ public class MlJobIT extends ESRestTestCase { "}"); client().performRequest(extraIndex2); - String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + // Use _cat/indices/.ml-anomalies-* instead of _cat/indices/_all to work around https://github.com/elastic/elasticsearch/issues/45652 + String indicesBeforeDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesBeforeDelete, containsString(indexName)); assertThat(indicesBeforeDelete, containsString(indexName + "-001")); assertThat(indicesBeforeDelete, containsString(indexName + "-002")); @@ -635,7 +650,8 @@ public class MlJobIT extends ESRestTestCase { client().performRequest(new Request("POST", "/_refresh")); // check that the indices still exist but are empty - String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity()); + String indicesAfterDelete = EntityUtils.toString(client().performRequest( + new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity()); assertThat(indicesAfterDelete, containsString(indexName)); assertThat(indicesAfterDelete, containsString(indexName + "-001")); assertThat(indicesAfterDelete, containsString(indexName + "-002")); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java index 951252270952..55211010f97f 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java @@ -69,7 +69,7 @@ public class RevertModelSnapshotIT extends MlNativeAutodetectIntegTestCase { assertThat(revertPointBucket.isInterim(), 
is(true)); // We need to wait a second to ensure the second time around model snapshot will have a different ID (it depends on epoch seconds) - awaitBusy(() -> false, 1, TimeUnit.SECONDS); + waitUntil(() -> false, 1, TimeUnit.SECONDS); openJob(job.getId()); postData(job.getId(), generateData(startTime + 10 * bucketSpan.getMillis(), bucketSpan, 10, Arrays.asList("foo", "bar"), diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java index 454a3eb06e5a..a70c0ef0af72 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java +++ b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java @@ -315,12 +315,12 @@ public class PainlessDomainSplitIT extends ESRestTestCase { client().performRequest(createFeedRequest); client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed/_start")); - boolean passed = awaitBusy(() -> { - try { + try { + assertBusy(() -> { client().performRequest(new Request("POST", "/_refresh")); Response response = client().performRequest(new Request("GET", - MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/results/records")); + MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/results/records")); String responseBody = EntityUtils.toString(response.getEntity()); if (responseBody.contains("\"count\":2")) { @@ -339,27 +339,19 @@ public class PainlessDomainSplitIT extends ESRestTestCase { // domainSplit() tests had subdomain, testHighestRegisteredDomainCases() do not if (test.subDomainExpected != null) { assertThat("Expected subdomain [" + test.subDomainExpected + "] but found [" + actualSubDomain - + "]. Actual " + actualTotal + " vs Expected " + expectedTotal, actualSubDomain, - equalTo(test.subDomainExpected)); + + "]. Actual " + actualTotal + " vs Expected " + expectedTotal, actualSubDomain, + equalTo(test.subDomainExpected)); } assertThat("Expected domain [" + test.domainExpected + "] but found [" + actualDomain + "]. 
Actual " - + actualTotal + " vs Expected " + expectedTotal, actualDomain, equalTo(test.domainExpected)); - - return true; + + actualTotal + " vs Expected " + expectedTotal, actualDomain, equalTo(test.domainExpected)); } else { logger.error(responseBody); - return false; + fail("Response body didn't contain [\"count\":2]"); } - - } catch (Exception e) { - logger.error(e.getMessage()); - return false; - } - - }, 5, TimeUnit.SECONDS); - - if (!passed) { + }, 5, TimeUnit.SECONDS); + } catch (Exception e) { + logger.error(e.getMessage()); fail("Anomaly records were not found within 5 seconds"); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 1f5ade6b9ac4..cc01e15b3665 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -123,6 +123,7 @@ import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; import org.elasticsearch.xpack.core.ml.dataframe.analyses.MlDataFrameAnalysisNamedXContentProvider; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; @@ -942,6 +943,8 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu List namedXContent = new ArrayList<>(); namedXContent.addAll(new MlEvaluationNamedXContentProvider().getNamedXContentParsers()); namedXContent.addAll(new MlDataFrameAnalysisNamedXContentProvider().getNamedXContentParsers()); + namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); return namedXContent; } + } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java index 2f7f9241038a..dd95d0a5e9b3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java @@ -79,12 +79,12 @@ public class MlDailyMaintenanceService implements Releasable { return TimeValue.timeValueMillis(next.toInstant().toEpochMilli() - now.toInstant().toEpochMilli()); } - public void start() { + public synchronized void start() { LOGGER.debug("Starting ML daily maintenance service"); scheduleNext(); } - public void stop() { + public synchronized void stop() { LOGGER.debug("Stopping ML daily maintenance service"); if (cancellable != null && cancellable.isCancelled() == false) { cancellable.cancel(); @@ -100,7 +100,7 @@ public class MlDailyMaintenanceService implements Releasable { stop(); } - private void scheduleNext() { + private synchronized void scheduleNext() { try { cancellable = threadPool.schedule(this::triggerTasks, schedulerProvider.get(), ThreadPool.Names.GENERIC); } catch (EsRejectedExecutionException e) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index 326081f545c1..89ee978ca79e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -39,6 +39,7 @@ class MlInitializationService implements LocalNodeMasterListener, ClusterStateLi this.clusterService = clusterService; this.client = client; clusterService.addListener(this); + clusterService.addLocalNodeMasterListener(this); } @Override @@ -80,7 +81,7 @@ class MlInitializationService implements LocalNodeMasterListener, ClusterStateLi return ThreadPool.Names.GENERIC; } - private void installDailyMaintenanceService() { + private synchronized void installDailyMaintenanceService() { if (mlDailyMaintenanceService == null) { mlDailyMaintenanceService = new MlDailyMaintenanceService(clusterService.getClusterName(), threadPool, client); mlDailyMaintenanceService.start(); @@ -93,7 +94,7 @@ class MlInitializationService implements LocalNodeMasterListener, ClusterStateLi } } - private void uninstallDailyMaintenanceService() { + private synchronized void uninstallDailyMaintenanceService() { if (mlDailyMaintenanceService != null) { mlDailyMaintenanceService.stop(); mlDailyMaintenanceService = null; @@ -106,7 +107,7 @@ class MlInitializationService implements LocalNodeMasterListener, ClusterStateLi } /** For testing */ - void setDailyMaintenanceService(MlDailyMaintenanceService service) { + synchronized void setDailyMaintenanceService(MlDailyMaintenanceService service) { mlDailyMaintenanceService = service; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java index 2ca09af7d33a..5c48be663f11 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java @@ -12,12 +12,13 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.EvaluateDataFrameAction; import org.elasticsearch.xpack.core.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.xpack.core.ml.dataframe.evaluation.EvaluationMetricResult; +import org.elasticsearch.xpack.ml.utils.TypedChainTaskExecutor; import java.util.List; @@ -38,24 +39,64 @@ public class TransportEvaluateDataFrameAction extends HandledTransportAction listener) { - Evaluation evaluation = request.getEvaluation(); - SearchRequest searchRequest = new SearchRequest(request.getIndices()); - searchRequest.source(evaluation.buildSearch(request.getParsedQuery())); - - ActionListener> resultsListener = ActionListener.wrap( - results -> listener.onResponse(new EvaluateDataFrameAction.Response(evaluation.getName(), results)), + ActionListener> resultsListener = ActionListener.wrap( + unused -> { + EvaluateDataFrameAction.Response response = + new EvaluateDataFrameAction.Response(request.getEvaluation().getName(), 
request.getEvaluation().getResults()); + listener.onResponse(response); + }, listener::onFailure ); - client.execute(SearchAction.INSTANCE, searchRequest, ActionListener.wrap( - searchResponse -> threadPool.generic().execute(() -> { - try { - evaluation.evaluate(searchResponse, resultsListener); - } catch (Exception e) { - listener.onFailure(e); - }; - }), - listener::onFailure - )); + EvaluationExecutor evaluationExecutor = new EvaluationExecutor(threadPool, client, request); + evaluationExecutor.execute(resultsListener); + } + + /** + * The {@link EvaluationExecutor} class allows for serial execution of evaluation steps. + * + * Each step consists of the following phases: + * 1. build search request with aggs requested by individual metrics + * 2. execute search action with the request built in (1.) + * 3. make all individual metrics process the search response obtained in (2.) + * 4. check if all the metrics have their results computed + * a) If so, call the final listener and finish + * b) Otherwise, add another step to the queue + * + * To avoid an infinite loop it is essential that every metric *does* compute its result at some point. + * */ + private static final class EvaluationExecutor extends TypedChainTaskExecutor { + + private final Client client; + private final EvaluateDataFrameAction.Request request; + private final Evaluation evaluation; + + EvaluationExecutor(ThreadPool threadPool, Client client, EvaluateDataFrameAction.Request request) { + super(threadPool.generic(), unused -> true, unused -> true); + this.client = client; + this.request = request; + this.evaluation = request.getEvaluation(); + // Add one task only. Other tasks will be added as needed by the nextTask method itself. + add(nextTask()); + } + + private TypedChainTaskExecutor.ChainTask nextTask() { + return listener -> { + SearchSourceBuilder searchSourceBuilder = evaluation.buildSearch(request.getParsedQuery()); + SearchRequest searchRequest = new SearchRequest(request.getIndices()).source(searchSourceBuilder); + client.execute( + SearchAction.INSTANCE, + searchRequest, + ActionListener.wrap( + searchResponse -> { + evaluation.process(searchResponse); + if (evaluation.hasAllResults() == false) { + add(nextTask()); + } + listener.onResponse(null); + }, + listener::onFailure)); + }; + } } }
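The javadoc above is the heart of this change: instead of issuing one search and evaluating its response once, evaluation becomes a loop of search rounds in which each response handler decides whether to enqueue another round. Stripped of the Elasticsearch types, the self-chaining shape looks roughly like the sketch below; it uses only plain JDK types and is an illustration of the pattern, not the actual TypedChainTaskExecutor implementation.

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.function.BooleanSupplier;

    public final class ChainedSteps {
        // Run `step` (phases 1-3 in the javadoc above), then either finish (4a)
        // or chain another run of the same step (4b).
        static void runUntilDone(Runnable step, BooleanSupplier done, Runnable onComplete) {
            CompletableFuture.runAsync(step).thenRun(() -> {
                if (done.getAsBoolean()) {
                    onComplete.run();                     // 4a: all results computed
                } else {
                    runUntilDone(step, done, onComplete); // 4b: add another step
                }
            });
        }

        public static void main(String[] args) throws InterruptedException {
            AtomicInteger rounds = new AtomicInteger();   // one "search round" per step
            CountDownLatch finished = new CountDownLatch(1);
            runUntilDone(rounds::incrementAndGet, () -> rounds.get() >= 3, finished::countDown);
            finished.await();
            System.out.println("rounds executed: " + rounds.get()); // prints 3
        }
    }

As the javadoc's caveat warns, the loop terminates only because the completion check (standing in for hasAllResults()) must eventually become true.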
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index e7a1ece44a82..6cf6b255678f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -165,10 +165,6 @@ public class TransportOpenJobAction extends TransportMasterNodeAction d.getRules().isEmpty() == false); - } - public static String nodeFilter(DiscoveryNode node, Job job) { String jobId = job.getId(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java index 2a172fd6d9c7..67b5e988826c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTask.java @@ -60,6 +60,7 @@ public class DataFrameAnalyticsTask extends AllocatedPersistentTask implements S private volatile Long reindexingTaskId; private volatile boolean isReindexingFinished; private volatile boolean isStopping; + private volatile boolean isMarkAsCompletedCalled; private final ProgressTracker progressTracker = new ProgressTracker(); public DataFrameAnalyticsTask(long id, String type, String action, TaskId parentTask, Map headers, @@ -102,10 +103,17 @@ public class DataFrameAnalyticsTask extends AllocatedPersistentTask implements S public void markAsCompleted() { // It is possible that the stop API has been called in the meantime and that // may also cause this method to be called. We check whether we have already - // been marked completed to avoid doing it twice. - if (isCompleted() == false) { - persistProgress(() -> super.markAsCompleted()); + // been marked completed to avoid doing it twice. We need to capture that + // locally instead of relying on isCompleted() because of the asynchronous + // persistence of progress. + synchronized (this) { + if (isMarkAsCompletedCalled) { + return; + } + isMarkAsCompletedCalled = true; } + + persistProgress(() -> super.markAsCompleted()); } @Override @@ -224,6 +232,7 @@ public class DataFrameAnalyticsTask extends AllocatedPersistentTask implements S } private void persistProgress(Runnable runnable) { + LOGGER.debug("[{}] Persisting progress", taskParams.getId()); GetDataFrameAnalyticsStatsAction.Request getStatsRequest = new GetDataFrameAnalyticsStatsAction.Request(taskParams.getId()); executeAsyncWithOrigin(client, ML_ORIGIN, GetDataFrameAnalyticsStatsAction.INSTANCE, getStatsRequest, ActionListener.wrap( statsResponse -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 585b4d1f87df..9944b8f4fc00 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -543,21 +543,6 @@ public class JobConfigProvider { } - private SearchRequest makeExpandIdsSearchRequest(String expression, boolean excludeDeleting) { - String [] tokens = ExpandedIdsMatcher.tokenizeExpression(expression); - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildQuery(tokens, excludeDeleting)); - sourceBuilder.sort(Job.ID.getPreferredName()); - sourceBuilder.fetchSource(false); - sourceBuilder.docValueField(Job.ID.getPreferredName(), null); - sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), null); - return client.prepareSearch(AnomalyDetectorsIndex.configIndexName()) - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .setSource(sourceBuilder) - .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW) - .request(); - } - /** * The same logic as {@link #expandJobsIds(String, boolean, boolean, ActionListener)} but * the full anomaly detector job configuration is returned. 
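The markAsCompleted() change above is a run-once guard: isCompleted() cannot be trusted while the asynchronous progress persistence is still in flight, so the task records the call itself under a lock. Reduced to its essentials, the idiom looks like the sketch below (a hypothetical class for illustration, not the DataFrameAnalyticsTask source).

    public class OnceOnlyCompletion {
        private boolean completed; // guarded by `this`

        public void markAsCompleted(Runnable persistProgressThenComplete) {
            synchronized (this) {
                if (completed) {
                    return;        // a concurrent stop/finish already got here first
                }
                completed = true;
            }
            // Deliberately outside the lock: persistence may take arbitrarily long.
            persistProgressThenComplete.run();
        }
    }

Flipping the flag inside the lock while running the slow persistence work outside it is what keeps a second caller from blocking behind the first caller's persistence.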
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java index 8d49a0bfdbcc..7f28a0859c61 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java @@ -284,7 +284,7 @@ public class AutodetectCommunicator implements Closeable { } @Nullable - FlushAcknowledgement waitFlushToCompletion(String flushId) throws InterruptedException { + FlushAcknowledgement waitFlushToCompletion(String flushId) throws Exception { LOGGER.debug("[{}] waiting for flush", job.getId()); FlushAcknowledgement flushAcknowledgement; @@ -300,6 +300,7 @@ public class AutodetectCommunicator implements Closeable { } if (processKilled == false) { + LOGGER.debug("[{}] Initial flush completed, waiting until renormalizer is idle.", job.getId()); // We also have to wait for the normalizer to become idle so that we block // clients from querying results in the middle of normalization. autodetectResultProcessor.waitUntilRenormalizerIsIdle(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java index 282dfa2c2f93..c9441e9f60c3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessor.java @@ -288,9 +288,21 @@ public class AutodetectResultProcessor { // Commit previous writes here, effectively continuing // the flush from the C++ autodetect process right // through to the data store - bulkResultsPersister.executeRequest(); - persister.commitResultWrites(jobId); - flushListener.acknowledgeFlush(flushAcknowledgement); + Exception exception = null; + try { + bulkResultsPersister.executeRequest(); + persister.commitResultWrites(jobId); + LOGGER.debug("[{}] Flush acknowledgement sent to listener for ID {}", jobId, flushAcknowledgement.getId()); + } catch (Exception e) { + LOGGER.error( + "[" + jobId + "] failed to bulk persist results and commit writes during flush acknowledgement for ID " + + flushAcknowledgement.getId(), + e); + exception = e; + throw e; + } finally { + flushListener.acknowledgeFlush(flushAcknowledgement, exception); + } // Interim results may have been produced by the flush, // which need to be // deleted when the next finalized results come through @@ -391,7 +403,7 @@ public class AutodetectResultProcessor { * @return The {@link FlushAcknowledgement} if the flush has completed or the parsing finished; {@code null} if the timeout expired */ @Nullable - public FlushAcknowledgement waitForFlushAcknowledgement(String flushId, Duration timeout) throws InterruptedException { + public FlushAcknowledgement waitForFlushAcknowledgement(String flushId, Duration timeout) throws Exception { return failed ? 
null : flushListener.waitForFlush(flushId, timeout); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListener.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListener.java index 0028bfef9288..2a349ce8aee3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListener.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListener.java @@ -28,25 +28,29 @@ class FlushListener { }); @Nullable - FlushAcknowledgement waitForFlush(String flushId, Duration timeout) throws InterruptedException { + FlushAcknowledgement waitForFlush(String flushId, Duration timeout) throws Exception { if (onClear.hasRun()) { return null; } FlushAcknowledgementHolder holder = awaitingFlushed.computeIfAbsent(flushId, (key) -> new FlushAcknowledgementHolder(flushId)); if (holder.latch.await(timeout.toMillis(), TimeUnit.MILLISECONDS)) { + if (holder.flushException != null) { + throw holder.flushException; + } return holder.flushAcknowledgement; } return null; } - void acknowledgeFlush(FlushAcknowledgement flushAcknowledgement) { + void acknowledgeFlush(FlushAcknowledgement flushAcknowledgement, @Nullable Exception exception) { // acknowledgeFlush(...) could be called before waitForFlush(...): // a flush API call writes a flush command to the analytical process, and then, on a different thread, // the result reader checks whether the flush has been acked. String flushId = flushAcknowledgement.getId(); FlushAcknowledgementHolder holder = awaitingFlushed.computeIfAbsent(flushId, (key) -> new FlushAcknowledgementHolder(flushId)); holder.flushAcknowledgement = flushAcknowledgement; + holder.flushException = exception; + holder.latch.countDown(); } @@ -62,6 +66,7 @@ class FlushListener { private final CountDownLatch latch; private volatile FlushAcknowledgement flushAcknowledgement; + private volatile Exception flushException; private FlushAcknowledgementHolder(String flushId) { this.flushAcknowledgement = new FlushAcknowledgement(flushId, null);
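The FlushListener change pairs the existing per-flush CountDownLatch with a volatile exception slot, so that a failure observed on the result-reader thread is rethrown in the thread that is waiting for the flush. In isolation the handshake looks like the following sketch (a generic illustration, not the FlushListener source):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    public final class AckHolder<T> {
        private final CountDownLatch latch = new CountDownLatch(1);
        private volatile T result;
        private volatile Exception failure;

        // Producer side: record the result and any failure before releasing the latch.
        public void acknowledge(T result, Exception failure) {
            this.result = result;
            this.failure = failure; // written before countDown(), so waiters see it
            latch.countDown();
        }

        // Consumer side: null on timeout, rethrow if the producer recorded a failure.
        public T await(long timeout, TimeUnit unit) throws Exception {
            if (latch.await(timeout, unit) == false) {
                return null;
            }
            if (failure != null) {
                throw failure;
            }
            return result;
        }
    }

Because the fields are written before countDown() and read only after a successful await(), the latch supplies the happens-before edge that makes this exchange thread-safe.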
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ShortCircuitingRenormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ShortCircuitingRenormalizer.java index 0bd5a11609d6..519609d4a7e7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ShortCircuitingRenormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/ShortCircuitingRenormalizer.java @@ -122,7 +122,26 @@ public class ShortCircuitingRenormalizer implements Renormalizer { } private void forceFinishWork() { - semaphore.release(); + // We cannot allow new quantiles to be added while we are failing from a previous renormalization failure. + synchronized (quantilesDeque) { + // We discard all but the earliest quantiles, if they exist + QuantilesWithLatch earliestQuantileWithLatch = null; + for (QuantilesWithLatch quantilesWithLatch = quantilesDeque.pollFirst(); quantilesWithLatch != null; + quantilesWithLatch = quantilesDeque.pollFirst()) { + if (earliestQuantileWithLatch == null) { + earliestQuantileWithLatch = quantilesWithLatch; + } + // Count down all the latches as they no longer matter since we failed + quantilesWithLatch.latch.countDown(); + } + // Keep the earliest quantile so that the next call to doRenormalizations() will include as much of the failed normalization + // window as possible. + // Since this latch is already counted down, there is no reason to put it in the `latchDeque` again + if (earliestQuantileWithLatch != null) { + quantilesDeque.addLast(earliestQuantileWithLatch); + } + semaphore.release(); + } } private void doRenormalizations() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java index a67ff4ab3374..6a4230acf643 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESTestCase; @@ -236,7 +235,7 @@ public class DataFrameRowsJoinerTests extends ESTestCase { } private static SearchHit newHit(String json) { - SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10), new Text("doc"), Collections.emptyMap()); + SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10), Collections.emptyMap()); hit.sourceRef(new BytesArray(json)); return hit; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java index e59f724a3637..3a9430bd7fb5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java @@ -79,6 +79,7 @@ public class BasicDistributedJobsIT extends BaseMlIntegTestCase { awaitJobOpenedAndAssigned(job.getId(), null); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/44566") public void testFailOverBasics_withDataFeeder() throws Exception { internalCluster().ensureAtLeastNumDataNodes(4); ensureStableCluster(4); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index d845bb5751fd..76b1b459c216 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -33,7 +33,6 @@ import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.Index; @@ -1033,10 +1032,6 @@ public class JobResultsProviderTests extends ESTestCase { verifyNoMoreInteractions(client); } - private Bucket createBucketAtEpochTime(long epoch) { - return new Bucket("foo", new Date(epoch), 123); - } - private JobResultsProvider createProvider(Client client) { return new JobResultsProvider(client, Settings.EMPTY); } @@ -1052,7 +1047,7 @@ public class JobResultsProviderTests extends ESTestCase { fields.put("field_1", new DocumentField("field_1", Collections.singletonList("foo"))); fields.put("field_2", new DocumentField("field_2", Collections.singletonList("foo"))); - SearchHit hit = new SearchHit(123, String.valueOf(map.hashCode()), new Text("foo"), fields) + SearchHit hit = new SearchHit(123, String.valueOf(map.hashCode()), fields) .sourceRef(BytesReference.bytes(XContentFactory.jsonBuilder().map(_source))); list.add(hit); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java index 9f6d52950732..4562779fc292 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java @@ -108,7 +108,7 @@ public class AutodetectCommunicatorTests extends ESTestCase { verifyNoMoreInteractions(process); } - public void testFlushJob() throws IOException, InterruptedException { + public void testFlushJob() throws Exception { AutodetectProcess process = mockAutodetectProcessWithOutputStream(); when(process.isProcessAlive()).thenReturn(true); AutodetectResultProcessor processor = mock(AutodetectResultProcessor.class); @@ -123,7 +123,7 @@ public class AutodetectCommunicatorTests extends ESTestCase { } } - public void testWaitForFlushReturnsIfParserFails() throws IOException, InterruptedException { + public void testWaitForFlushReturnsIfParserFails() throws Exception { AutodetectProcess process = mockAutodetectProcessWithOutputStream(); when(process.isProcessAlive()).thenReturn(true); AutodetectResultProcessor processor = mock(AutodetectResultProcessor.class); @@ -144,7 +144,7 @@ public class AutodetectCommunicatorTests extends ESTestCase { assertEquals("[foo] Unexpected death of autodetect: Mock process is dead", holder[0].getMessage()); } - public void testFlushJob_givenFlushWaitReturnsTrueOnSecondCall() throws IOException, InterruptedException { + public void testFlushJob_givenFlushWaitReturnsTrueOnSecondCall() throws Exception { AutodetectProcess process = mockAutodetectProcessWithOutputStream(); when(process.isProcessAlive()).thenReturn(true); AutodetectResultProcessor autodetectResultProcessor = Mockito.mock(AutodetectResultProcessor.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java index 
d76f87b5e543..66f145d405c4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultProcessorTests.java @@ -220,7 +220,7 @@ public class AutodetectResultProcessorTests extends ESTestCase { assertTrue(processorUnderTest.isDeleteInterimRequired()); verify(persister).bulkPersisterBuilder(JOB_ID); - verify(flushListener).acknowledgeFlush(flushAcknowledgement); + verify(flushListener).acknowledgeFlush(flushAcknowledgement, null); verify(persister).commitResultWrites(JOB_ID); verify(bulkBuilder).executeRequest(); } @@ -242,7 +242,7 @@ public class AutodetectResultProcessorTests extends ESTestCase { inOrder.verify(persister).persistCategoryDefinition(categoryDefinition); inOrder.verify(bulkBuilder).executeRequest(); inOrder.verify(persister).commitResultWrites(JOB_ID); - inOrder.verify(flushListener).acknowledgeFlush(flushAcknowledgement); + inOrder.verify(flushListener).acknowledgeFlush(flushAcknowledgement, null); } public void testProcessResult_modelPlot() { @@ -397,7 +397,7 @@ public class AutodetectResultProcessorTests extends ESTestCase { verify(persister, times(2)).persistModelSnapshot(any(), eq(WriteRequest.RefreshPolicy.IMMEDIATE)); } - public void testParsingErrorSetsFailed() throws InterruptedException { + public void testParsingErrorSetsFailed() throws Exception { @SuppressWarnings("unchecked") Iterator iterator = mock(Iterator.class); when(iterator.hasNext()).thenThrow(new ElasticsearchParseException("this test throws")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListenerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListenerTests.java index 3343882d581b..fa506ced23d6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListenerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/FlushListenerTests.java @@ -14,6 +14,7 @@ import java.util.Date; import java.util.List; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class FlushListenerTests extends ESTestCase { @@ -27,12 +28,14 @@ public class FlushListenerTests extends ESTestCase { flushAcknowledgementHolder.set(flushAcknowledgement); } catch (InterruptedException _ex) { Thread.currentThread().interrupt(); + } catch (Exception ex) { + fail("unexpected exception " + ex.getMessage()); } }).start(); assertBusy(() -> assertTrue(listener.awaitingFlushed.containsKey("_id"))); assertNull(flushAcknowledgementHolder.get()); FlushAcknowledgement flushAcknowledgement = new FlushAcknowledgement("_id", new Date(12345678L)); - listener.acknowledgeFlush(flushAcknowledgement); + listener.acknowledgeFlush(flushAcknowledgement, null); assertBusy(() -> assertNotNull(flushAcknowledgementHolder.get())); assertEquals(1, listener.awaitingFlushed.size()); @@ -40,6 +43,33 @@ public class FlushListenerTests extends ESTestCase { assertEquals(0, listener.awaitingFlushed.size()); } + public void testAcknowledgeFlushFailure() throws Exception { + FlushListener listener = new FlushListener(); + AtomicReference flushExceptionHolder = new AtomicReference<>(); + new Thread(() -> { + try { + listener.waitForFlush("_id", Duration.ofMillis(10000)); + fail("Expected an exception to be thrown."); + } catch (InterruptedException _ex) { + Thread.currentThread().interrupt(); + } catch (Exception ex) { + flushExceptionHolder.set(ex); + } + }).start(); + assertBusy(() -> assertTrue(listener.awaitingFlushed.containsKey("_id"))); + assertNull(flushExceptionHolder.get()); + FlushAcknowledgement flushAcknowledgement = new FlushAcknowledgement("_id", new Date(12345678L)); + listener.acknowledgeFlush(flushAcknowledgement, new Exception("BOOM")); + assertBusy(() -> { + assertNotNull(flushExceptionHolder.get()); + assertThat(flushExceptionHolder.get().getMessage(), equalTo("BOOM")); + }); + assertEquals(1, listener.awaitingFlushed.size()); + + listener.clear("_id"); + assertEquals(0, listener.awaitingFlushed.size()); + } + public void testClear() throws Exception { FlushListener listener = new FlushListener(); @@ -55,6 +85,8 @@ public class FlushListenerTests extends ESTestCase { flushAcknowledgementHolder.set(flushAcknowledgement); } catch (InterruptedException _ex) { Thread.currentThread().interrupt(); + } catch (Exception ex) { + fail("unexpected exception " + ex.getMessage()); } }).start(); } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index b1ab7f845315..80cdcd6643e7 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.monitoring.integration; -import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -24,7 +22,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.license.License; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; @@ -43,12 +40,6 @@ import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkResponse; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.LocalStateMonitoring; import org.elasticsearch.xpack.monitoring.MonitoringService; -import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsMonitoringDoc; -import org.elasticsearch.xpack.monitoring.collector.indices.IndexRecoveryMonitoringDoc; -import org.elasticsearch.xpack.monitoring.collector.indices.IndexStatsMonitoringDoc; -import org.elasticsearch.xpack.monitoring.collector.indices.IndicesStatsMonitoringDoc; -import org.elasticsearch.xpack.monitoring.collector.node.NodeStatsMonitoringDoc; -import org.elasticsearch.xpack.monitoring.collector.shards.ShardMonitoringDoc; import org.elasticsearch.xpack.monitoring.test.MockIngestPlugin; import java.io.IOException; @@ -74,13 +65,11 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.threadpool.ThreadPool.Names.WRITE; import static 
org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.TEMPLATE_VERSION; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isEmptyOrNullString; -import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -179,7 +168,7 @@ public class MonitoringIT extends ESSingleNodeTestCase { equalTo(1L)); for (final SearchHit hit : hits.getHits()) { - assertMonitoringDoc(toMap(hit), system, "monitoring_data_type", interval); + assertMonitoringDoc(toMap(hit), system, interval); } }); } @@ -227,25 +216,7 @@ public class MonitoringIT extends ESSingleNodeTestCase { for (final SearchHit hit : searchResponse.get().getHits()) { final Map searchHit = toMap(hit); - final String type = (String) extractValue("_source.type", searchHit); - - assertMonitoringDoc(searchHit, MonitoredSystem.ES, type, MonitoringService.MIN_INTERVAL); - - if (ClusterStatsMonitoringDoc.TYPE.equals(type)) { - assertClusterStatsMonitoringDoc(searchHit, createAPMIndex); - } else if (IndexRecoveryMonitoringDoc.TYPE.equals(type)) { - assertIndexRecoveryMonitoringDoc(searchHit); - } else if (IndicesStatsMonitoringDoc.TYPE.equals(type)) { - assertIndicesStatsMonitoringDoc(searchHit); - } else if (IndexStatsMonitoringDoc.TYPE.equals(type)) { - assertIndexStatsMonitoringDoc(searchHit); - } else if (NodeStatsMonitoringDoc.TYPE.equals(type)) { - assertNodeStatsMonitoringDoc(searchHit); - } else if (ShardMonitoringDoc.TYPE.equals(type)) { - assertShardMonitoringDoc(searchHit); - } else { - fail("Monitoring document of type [" + type + "] is not supported by this test"); - } + assertMonitoringDoc(searchHit, MonitoredSystem.ES, MonitoringService.MIN_INTERVAL); } }); @@ -258,9 +229,8 @@ public class MonitoringIT extends ESSingleNodeTestCase { @SuppressWarnings("unchecked") private void assertMonitoringDoc(final Map document, final MonitoredSystem expectedSystem, - final String expectedType, final TimeValue interval) { - assertEquals(document.toString(),4, document.size()); + assertEquals(document.toString(),3, document.size()); final String index = (String) document.get("_index"); assertThat(index, containsString(".monitoring-" + expectedSystem.getSystem() + "-" + TEMPLATE_VERSION + "-")); @@ -269,7 +239,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { final Map source = (Map) document.get("_source"); assertThat(source, notNullValue()); assertThat((String) source.get("cluster_uuid"), not(isEmptyOrNullString())); - assertThat(source.get("type"), equalTo(expectedType)); final String timestamp = (String) source.get("timestamp"); assertThat(timestamp, not(isEmptyOrNullString())); @@ -308,204 +277,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { assertThat((String) sourceNode.get("timestamp"), not(isEmptyOrNullString())); } - /** - * Assert that a {@link ClusterStatsMonitoringDoc} contains the expected information - */ - @SuppressWarnings("unchecked") - private void assertClusterStatsMonitoringDoc(final Map document, - final boolean apmIndicesExist) { - final Map source = (Map) document.get("_source"); - assertEquals(12, source.size()); - - assertThat((String) source.get("cluster_name"), not(isEmptyOrNullString())); - 
assertThat(source.get("version"), equalTo(Version.CURRENT.toString())); - - final Map license = (Map) source.get("license"); - assertThat(license, notNullValue()); - assertThat((String) license.get(License.Fields.ISSUER), not(isEmptyOrNullString())); - assertThat((String) license.get(License.Fields.ISSUED_TO), not(isEmptyOrNullString())); - assertThat((Long) license.get(License.Fields.ISSUE_DATE_IN_MILLIS), greaterThan(0L)); - assertThat((Integer) license.get(License.Fields.MAX_NODES), greaterThan(0)); - - String uid = (String) license.get("uid"); - assertThat(uid, not(isEmptyOrNullString())); - - String type = (String) license.get("type"); - assertThat(type, not(isEmptyOrNullString())); - - String status = (String) license.get(License.Fields.STATUS); - assertThat(status, not(isEmptyOrNullString())); - - if ("basic".equals(license.get("type")) == false) { - Long expiryDate = (Long) license.get(License.Fields.EXPIRY_DATE_IN_MILLIS); - assertThat(expiryDate, greaterThan(0L)); - } - - Boolean clusterNeedsTLS = (Boolean) license.get("cluster_needs_tls"); - assertThat(clusterNeedsTLS, isOneOf(true, null)); - - final Map clusterStats = (Map) source.get("cluster_stats"); - assertThat(clusterStats, notNullValue()); - assertThat(clusterStats.size(), equalTo(5)); - - final Map stackStats = (Map) source.get("stack_stats"); - assertThat(stackStats, notNullValue()); - assertThat(stackStats.size(), equalTo(2)); - - final Map apm = (Map) stackStats.get("apm"); - assertThat(apm, notNullValue()); - assertThat(apm.size(), equalTo(1)); - assertThat(apm.remove("found"), is(apmIndicesExist)); - assertThat(apm.keySet(), empty()); - - final Map xpackStats = (Map) stackStats.get("xpack"); - assertThat(xpackStats, notNullValue()); - assertThat("X-Pack stats must have at least monitoring, but others may be hidden", xpackStats.size(), greaterThanOrEqualTo(1)); - - final Map monitoring = (Map) xpackStats.get("monitoring"); - // we don't make any assumptions about what's in it, only that it's there - assertThat(monitoring, notNullValue()); - - final Map clusterState = (Map) source.get("cluster_state"); - assertThat(clusterState, notNullValue()); - assertThat(clusterState.remove("nodes_hash"), notNullValue()); - assertThat(clusterState.remove("status"), notNullValue()); - assertThat(clusterState.remove("version"), notNullValue()); - assertThat(clusterState.remove("state_uuid"), notNullValue()); - assertThat(clusterState.remove("cluster_uuid"), notNullValue()); - assertThat(clusterState.remove("master_node"), notNullValue()); - assertThat(clusterState.remove("nodes"), notNullValue()); - assertThat(clusterState.keySet(), empty()); - - final Map clusterSettings = (Map) source.get("cluster_settings"); - assertThat(clusterSettings, notNullValue()); - assertThat(clusterSettings.remove("cluster"), notNullValue()); - assertThat(clusterSettings.keySet(), empty()); - } - - /** - * Assert that a {@link IndexRecoveryMonitoringDoc} contains the expected information - */ - @SuppressWarnings("unchecked") - private void assertIndexRecoveryMonitoringDoc(final Map document) { - final Map source = (Map) document.get("_source"); - assertEquals(6, source.size()); - - final Map indexRecovery = (Map) source.get(IndexRecoveryMonitoringDoc.TYPE); - assertEquals(1, indexRecovery.size()); - - final List shards = (List) indexRecovery.get("shards"); - assertThat(shards, notNullValue()); - } - - /** - * Assert that a {@link IndicesStatsMonitoringDoc} contains the expected information - */ - @SuppressWarnings("unchecked") - private void 
assertIndicesStatsMonitoringDoc(final Map document) { - final Map source = (Map) document.get("_source"); - assertEquals(6, source.size()); - - final Map indicesStats = (Map) source.get(IndicesStatsMonitoringDoc.TYPE); - assertEquals(1, indicesStats.size()); - - IndicesStatsMonitoringDoc.XCONTENT_FILTERS.forEach(filter -> - assertThat(filter + " must not be null in the monitoring document", extractValue(filter, source), notNullValue())); - } - - /** - * Assert that a {@link IndexStatsMonitoringDoc} contains the expected information - */ - @SuppressWarnings("unchecked") - private void assertIndexStatsMonitoringDoc(final Map document) { - final Map source = (Map) document.get("_source"); - assertEquals(6, source.size()); - - // particular field values checked in the index stats tests - final Map indexStats = (Map) source.get(IndexStatsMonitoringDoc.TYPE); - assertEquals(7, indexStats.size()); - assertThat((String) indexStats.get("index"), not(isEmptyOrNullString())); - assertThat((String) indexStats.get("uuid"), not(isEmptyOrNullString())); - assertThat(indexStats.get("created"), notNullValue()); - assertThat((String) indexStats.get("status"), not(isEmptyOrNullString())); - assertThat(indexStats.get("shards"), notNullValue()); - final Map shards = (Map) indexStats.get("shards"); - assertEquals(11, shards.size()); - assertThat(indexStats.get("primaries"), notNullValue()); - assertThat(indexStats.get("total"), notNullValue()); - - IndexStatsMonitoringDoc.XCONTENT_FILTERS.forEach(filter -> - assertThat(filter + " must not be null in the monitoring document", extractValue(filter, source), notNullValue())); - } - - /** - * Assert that a {@link NodeStatsMonitoringDoc} contains the expected information - */ - @SuppressWarnings("unchecked") - private void assertNodeStatsMonitoringDoc(final Map document) { - final Map source = (Map) document.get("_source"); - assertEquals(6, source.size()); - - NodeStatsMonitoringDoc.XCONTENT_FILTERS.forEach(filter -> { - if (Constants.WINDOWS && filter.startsWith("node_stats.os.cpu.load_average")) { - // load average is unavailable on Windows - return; - } - - // fs and cgroup stats are only reported on Linux, but it's acceptable for _node/stats to report them as null if the OS is - // misconfigured or not reporting them for some reason (e.g., older kernel) - if (filter.startsWith("node_stats.fs") || filter.startsWith("node_stats.os.cgroup")) { - return; - } - - // load average is unavailable on macOS for 5m and 15m (but we get 1m), but it's also possible on Linux too - if ("node_stats.os.cpu.load_average.5m".equals(filter) || "node_stats.os.cpu.load_average.15m".equals(filter)) { - return; - } - - // bulk is not a thread pool in the current version but we allow it to support mixed version clusters - if (filter.startsWith("node_stats.thread_pool.bulk")) { - return; - } - - assertThat(filter + " must not be null in the monitoring document", extractValue(filter, source), notNullValue()); - }); - } - - /** - * Assert that a {@link ShardMonitoringDoc} contains the expected information - */ - @SuppressWarnings("unchecked") - private void assertShardMonitoringDoc(final Map document) { - final Map source = (Map) document.get("_source"); - assertEquals(7, source.size()); - assertThat(source.get("state_uuid"), notNullValue()); - - final Map shard = (Map) source.get("shard"); - assertEquals(6, shard.size()); - - final String currentNodeId = (String) shard.get("node"); - if (Strings.hasLength(currentNodeId)) { - assertThat(source.get("source_node"), notNullValue()); - } else { 
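The per-type assertion helpers being deleted above and below (`assertClusterStatsMonitoringDoc` through `assertShardMonitoringDoc`) all sat on top of one shared check, which is what survives in the slimmed-down `assertMonitoringDoc`: with the `type` field gone from `_source`, a monitoring hit is validated only by its index name, `cluster_uuid` and `timestamp`. A rough standalone sketch of that remaining invariant, assuming the hit map carries exactly `_index`, `_id` and `_source` (a hypothetical helper, not the test code itself):

    import java.util.Map;

    final class MonitoringDocChecks {

        // Common-field check every monitoring hit must pass, regardless of
        // which collector produced it: three top-level keys, the system and
        // template version encoded in the index name, and the two mandatory
        // _source fields.
        static void assertCommonFields(Map<String, Object> document, String system, String templateVersion) {
            if (document.size() != 3) { // assumed: _index, _id, _source
                throw new AssertionError("expected 3 top-level keys, got " + document);
            }
            String index = (String) document.get("_index");
            if (index == null || index.contains(".monitoring-" + system + "-" + templateVersion + "-") == false) {
                throw new AssertionError("unexpected index name: " + index);
            }
            @SuppressWarnings("unchecked")
            Map<String, Object> source = (Map<String, Object>) document.get("_source");
            if (source == null || source.get("cluster_uuid") == null || source.get("timestamp") == null) {
                throw new AssertionError("missing common _source fields: " + source);
            }
        }
    }
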
- assertThat(source.get("source_node"), nullValue()); - } - - ShardMonitoringDoc.XCONTENT_FILTERS.forEach(filter -> { - if (filter.equals("shard.relocating_node")) { - // Shard's relocating node is null most of the time in this test, we only check that the field is here - assertTrue(filter + " must exist in the monitoring document", shard.containsKey("relocating_node")); - return; - } - if (filter.equals("shard.node")) { - // Current node is null for replicas in this test, we only check that the field is here - assertTrue(filter + " must exist in the monitoring document", shard.containsKey("node")); - return; - } - assertThat(filter + " must not be null in the monitoring document", extractValue(filter, source), notNullValue()); - }); - } - /** * Executes the given {@link Runnable} once the monitoring exporters are ready and functional. Ensure that * the exporters and the monitoring service are shut down after the runnable has been executed. diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java index 6654a9a8c005..4f83898e48a6 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/rest/action/RestMonitoringBulkActionTests.java @@ -148,13 +148,6 @@ public class RestMonitoringBulkActionTests extends ESTestCase { return randomFrom(MonitoredSystem.LOGSTASH, MonitoredSystem.KIBANA, MonitoredSystem.BEATS); } - /** - * Returns a {@link String} representing a {@link MonitoredSystem} supported by the Monitoring Bulk API - */ - private static String randomSystemId() { - return randomSystem().getSystem(); - } - private void prepareRequest(final RestRequest restRequest) throws Exception { final NodeClient client = mock(NodeClient.class); final CheckedConsumer consumer = action.prepareRequest(restRequest, client); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java index 6f24257ffe0b..9d98605732d0 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java @@ -403,7 +403,7 @@ public class RollupJobTaskTests extends ESTestCase { } @SuppressWarnings("unchecked") - public void testTriggerWithoutHeaders() throws InterruptedException { + public void testTriggerWithoutHeaders() throws Exception { final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); Client client = mock(Client.class); @@ -470,7 +470,7 @@ public class RollupJobTaskTests extends ESTestCase { fail("Should not have entered onFailure"); } }); - ESTestCase.awaitBusy(started::get); + assertBusy(() -> assertTrue(started.get())); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); @@ -479,11 +479,11 @@ public class RollupJobTaskTests extends ESTestCase { latch.countDown(); // Wait for the final persistent status to finish - 
ESTestCase.awaitBusy(finished::get); + assertBusy(() -> assertTrue(finished.get())); } @SuppressWarnings("unchecked") - public void testTriggerWithHeaders() throws InterruptedException { + public void testTriggerWithHeaders() throws Exception { final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Map headers = new HashMap<>(1); headers.put("es-security-runas-user", "foo"); @@ -556,7 +556,7 @@ public class RollupJobTaskTests extends ESTestCase { fail("Should not have entered onFailure"); } }); - ESTestCase.awaitBusy(started::get); + assertBusy(() -> assertTrue(started.get())); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); @@ -565,11 +565,11 @@ public class RollupJobTaskTests extends ESTestCase { latch.countDown(); // Wait for the final persistent status to finish - ESTestCase.awaitBusy(finished::get); + assertBusy(() -> assertTrue(finished.get())); } @SuppressWarnings("unchecked") - public void testSaveStateChangesIDScheme() throws InterruptedException { + public void testSaveStateChangesIDScheme() throws Exception { final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Map headers = new HashMap<>(1); headers.put("es-security-runas-user", "foo"); @@ -643,7 +643,7 @@ public class RollupJobTaskTests extends ESTestCase { fail("Should not have entered onFailure"); } }); - ESTestCase.awaitBusy(started::get); + assertBusy(() -> assertTrue(started.get())); task.triggered(new SchedulerEngine.Event(RollupJobTask.SCHEDULE_NAME + "_" + job.getConfig().getId(), 123, 123)); assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.INDEXING)); @@ -652,7 +652,7 @@ public class RollupJobTaskTests extends ESTestCase { latch.countDown(); // Wait for the final persistent status to finish - ESTestCase.awaitBusy(finished::get); + assertBusy(() -> assertTrue(finished.get())); } public void testStopWhenStopped() throws InterruptedException { diff --git a/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java b/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java index 57db7972655e..db3d46fc1a7d 100644 --- a/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java +++ b/x-pack/plugin/search-business-rules/src/test/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderTests.java @@ -18,9 +18,9 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -89,7 +89,7 @@ public class PinnedQueryBuilderTests extends AbstractQueryTestCase createComponents(Client client, ThreadPool threadPool, ClusterService clusterService, - ResourceWatcherService resourceWatcherService, ScriptService scriptService) throws Exception { + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry 
xContentRegistry) throws Exception { if (enabled == false) { return Collections.singletonList(new SecurityUsageServices(null, null, null, null)); } @@ -406,7 +407,8 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings)); final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings); - final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState()); + final FileRolesStore fileRolesStore = new FileRolesStore(settings, env, resourceWatcherService, getLicenseState(), + xContentRegistry); final NativeRolesStore nativeRolesStore = new NativeRolesStore(settings, client, getLicenseState(), securityIndex.get()); final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(); List, ActionListener>> rolesProviders = new ArrayList<>(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java index 09612c5e01f0..72a92516e59e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java @@ -6,11 +6,13 @@ package org.elasticsearch.xpack.security.action; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -18,6 +20,8 @@ import org.elasticsearch.xpack.core.security.action.CreateApiKeyAction; import org.elasticsearch.xpack.core.security.action.CreateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; import org.elasticsearch.xpack.security.authc.ApiKeyService; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; @@ -32,14 +36,16 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction) CreateApiKeyRequest::new); this.apiKeyService = apiKeyService; this.securityContext = context; this.rolesStore = rolesStore; + this.xContentRegistry = xContentRegistry; } @Override @@ -49,7 +55,17 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction(Arrays.asList(authentication.getUser().roles())), - ActionListener.wrap(roleDescriptors -> apiKeyService.createApiKey(authentication, request, roleDescriptors, listener), + ActionListener.wrap(roleDescriptors -> { + for (RoleDescriptor rd : roleDescriptors) { + try { + DLSRoleQueryValidator.validateQueryField(rd.getIndicesPrivileges(), xContentRegistry); + } catch (ElasticsearchException | IllegalArgumentException e) { + listener.onFailure(e); + return; + } + } + apiKeyService.createApiKey(authentication, request, roleDescriptors, listener); + }, 
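The loop added to `doExecute` here is a plain validate-then-delegate guard: each requested role descriptor's DLS query is checked up front with `DLSRoleQueryValidator.validateQueryField`, and the first failure is routed to `listener.onFailure` before `createApiKey` is ever invoked, so no API key is created around an unparsable query. The control flow in isolation, with hypothetical `Listener` and `validate` stand-ins rather than the X-Pack types:

    import java.util.List;
    import java.util.function.Consumer;

    final class ValidateThenDelegate {

        interface Listener<T> {
            void onResponse(T value);
            void onFailure(Exception e);
        }

        // Validate every item before starting the real work; the early return
        // matters because an async API must report exactly one outcome.
        static <T> void execute(List<T> items, Consumer<T> validate, Runnable doWork, Listener<Void> listener) {
            for (T item : items) {
                try {
                    validate.accept(item); // throws on an invalid item
                } catch (RuntimeException e) {
                    listener.onFailure(e); // surface the first error...
                    return;                // ...and never begin the delegated work
                }
            }
            doWork.run();
        }
    }
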
listener::onFailure)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java index c0a91bcdb021..300c8c835ffc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java @@ -5,26 +5,32 @@ */ package org.elasticsearch.xpack.security.action.role; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; public class TransportPutRoleAction extends HandledTransportAction { private final NativeRolesStore rolesStore; + private final NamedXContentRegistry xContentRegistry; @Inject - public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService) { + public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService, + NamedXContentRegistry xContentRegistry) { super(PutRoleAction.NAME, transportService, actionFilters, PutRoleRequest::new); this.rolesStore = rolesStore; + this.xContentRegistry = xContentRegistry; } @Override @@ -35,6 +41,13 @@ public class TransportPutRoleAction extends HandledTransportAction() { @Override public void onResponse(Boolean created) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java index 5c8da3828559..4b6cd89adea8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java @@ -49,14 +49,14 @@ public final class TransportSamlPrepareAuthenticationAction } else if (realms.size() > 1) { listener.onFailure(SamlUtils.samlException("Found multiple matching realms [{}] for [{}]", realms, request)); } else { - prepareAuthentication(realms.get(0), listener); + prepareAuthentication(realms.get(0), request.getRelayState(), listener); } } - private void prepareAuthentication(SamlRealm realm, ActionListener listener) { + private void prepareAuthentication(SamlRealm realm, String relayState, ActionListener listener) { final AuthnRequest authnRequest = realm.buildAuthenticationRequest(); try { - String redirectUrl = new SamlRedirect(authnRequest, 
realm.getSigningConfiguration()).getRedirectUrl(); + String redirectUrl = new SamlRedirect(authnRequest, realm.getSigningConfiguration()).getRedirectUrl(relayState); listener.onResponse(new SamlPrepareAuthenticationResponse( realm.name(), authnRequest.getID(), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java index ae400172bf11..7401463bf574 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java @@ -8,10 +8,13 @@ package org.elasticsearch.xpack.security.action.user; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; @@ -36,15 +39,20 @@ public class TransportHasPrivilegesAction extends HandledTransportAction authorizationService.checkPrivileges(authentication, request, applicationPrivilegeDescriptors, listener), listener::onFailure)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRedirect.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRedirect.java index b728fb03bcdd..1c46a89478f2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRedirect.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlRedirect.java @@ -25,7 +25,7 @@ public class SamlRedirect { private final SAMLObject samlObject; private final String destination; private final String parameterName; - private final SigningConfiguration signing; + private final SigningConfiguration signing; public SamlRedirect(RequestAbstractType request, SigningConfiguration signing) { this.samlObject = request; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 77b539e593b8..1e3e91871108 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -203,11 +203,15 @@ class IndicesAndAliasesResolver { } else { resolvedIndicesBuilder.addLocal(aliasesRequest.aliases()); } - // if no aliases are authorized, then fill in an expression that - // MetaData#findAliases evaluates to the empty alias list. 
You cannot put - // "nothing" (the empty list) explicitly because this is resolved by es core to - // _all - if (aliasesRequest.aliases().length == 0) { + /* + * If no aliases are authorized, then fill in an expression that MetaData#findAliases evaluates to an + * empty alias list. We can not put an empty list here because core resolves this as _all. For other + * request types, this replacement is not needed and can trigger issues when we rewrite the request + * on the coordinating node. For example, for a remove index request, if we did this replacement, + * the request would be rewritten to include "*","-*" and for a user that does not have permissions + * on "*", the master node would not authorize the request. + */ + if (aliasesRequest.expandAliasesWildcards() && aliasesRequest.aliases().length == 0) { aliasesRequest.replaceAliases(NO_INDICES_OR_ALIASES_ARRAY); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index a2be72dc6d63..b1059c46cc66 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Nullable; @@ -29,6 +30,7 @@ import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; +import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; import org.elasticsearch.xpack.core.security.support.NoOpLogger; import org.elasticsearch.xpack.core.security.support.Validation; @@ -61,27 +63,30 @@ public class FileRolesStore implements BiConsumer, ActionListener>> listeners = new ArrayList<>(); private volatile Map permissions; - public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState) + public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState, + NamedXContentRegistry xContentRegistry) throws IOException { - this(settings, env, watcherService, null, licenseState); + this(settings, env, watcherService, null, licenseState, xContentRegistry); } FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Consumer> listener, - XPackLicenseState licenseState) throws IOException { + XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) throws IOException { this.settings = settings; this.file = resolveFile(env); if (listener != null) { listeners.add(listener); } this.licenseState = licenseState; + this.xContentRegistry = xContentRegistry; FileWatcher watcher = new FileWatcher(file.getParent()); watcher.addListener(new FileListener()); watcherService.add(watcher, ResourceWatcherService.Frequency.HIGH); - permissions = 
parseFile(file, logger, settings, licenseState); + permissions = parseFile(file, logger, settings, licenseState, xContentRegistry); } @@ -150,15 +155,17 @@ public class FileRolesStore implements BiConsumer, ActionListener parseFileForRoleNames(Path path, Logger logger) { - return parseRoleDescriptors(path, logger, false, Settings.EMPTY).keySet(); + // EMPTY is safe here because we never use namedObject as we are just parsing role names + return parseRoleDescriptors(path, logger, false, Settings.EMPTY, NamedXContentRegistry.EMPTY).keySet(); } - public static Map parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState) { - return parseFile(path, logger, true, settings, licenseState); + public static Map parseFile(Path path, Logger logger, Settings settings, XPackLicenseState licenseState, + NamedXContentRegistry xContentRegistry) { + return parseFile(path, logger, true, settings, licenseState, xContentRegistry); } - public static Map parseFile(Path path, Logger logger, boolean resolvePermission, - Settings settings, XPackLicenseState licenseState) { + public static Map parseFile(Path path, Logger logger, boolean resolvePermission, Settings settings, + XPackLicenseState licenseState, NamedXContentRegistry xContentRegistry) { if (logger == null) { logger = NoOpLogger.INSTANCE; } @@ -170,7 +177,7 @@ public class FileRolesStore implements BiConsumer, ActionListener roleSegments = roleSegments(path); final boolean flsDlsLicensed = licenseState.isDocumentAndFieldLevelSecurityAllowed(); for (String segment : roleSegments) { - RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings); + RoleDescriptor descriptor = parseRoleDescriptor(segment, path, logger, resolvePermission, settings, xContentRegistry); if (descriptor != null) { if (ReservedRolesStore.isReserved(descriptor.getName())) { logger.warn("role [{}] is reserved. the relevant role definition in the mapping file will be ignored", @@ -202,7 +209,8 @@ public class FileRolesStore implements BiConsumer, ActionListener parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings) { + public static Map parseRoleDescriptors(Path path, Logger logger, boolean resolvePermission, Settings settings, + NamedXContentRegistry xContentRegistry) { if (logger == null) { logger = NoOpLogger.INSTANCE; } @@ -213,7 +221,7 @@ public class FileRolesStore implements BiConsumer, ActionListener roleSegments = roleSegments(path); for (String segment : roleSegments) { - RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings); + RoleDescriptor rd = parseRoleDescriptor(segment, path, logger, resolvePermission, settings, xContentRegistry); if (rd != null) { roles.put(rd.getName(), rd); } @@ -231,12 +239,12 @@ public class FileRolesStore implements BiConsumer, ActionListener, ActionListener, ActionListener) () -> new ParameterizedMessage( + "invalid role definition [{}] in roles file [{}]. failed to validate query field. skipping role...", roleName, + path.toAbsolutePath()), e); + return null; + } } } return descriptor; @@ -350,7 +367,7 @@ public class FileRolesStore implements BiConsumer, ActionListener previousPermissions = permissions; try { - permissions = parseFile(file, logger, settings, licenseState); + permissions = parseFile(file, logger, settings, licenseState, xContentRegistry); logger.info("updated roles (roles file [{}] {})", file.toAbsolutePath(), Files.exists(file) ? 
"changed" : "removed"); } catch (Exception e) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 8aea63dc34b3..45cd8de8ac97 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -177,12 +177,13 @@ public class NativePrivilegeStore { } final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); if (termsQuery != null) { - boolQuery.filter(termsQuery); + boolQuery.should(termsQuery); } for (String wildcard : wildcardNames) { final String prefix = wildcard.substring(0, wildcard.length() - 1); - boolQuery.filter(QueryBuilders.prefixQuery(APPLICATION.getPreferredName(), prefix)); + boolQuery.should(QueryBuilders.prefixQuery(APPLICATION.getPreferredName(), prefix)); } + boolQuery.minimumShouldMatch(1); return boolQuery; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java index 84ff7ba1edf2..b227f2c767f6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/saml/RestSamlPrepareAuthenticationAction.java @@ -44,6 +44,7 @@ public class RestSamlPrepareAuthenticationAction extends SamlBaseRestHandler { static { PARSER.declareString(SamlPrepareAuthenticationRequest::setAssertionConsumerServiceURL, new ParseField("acs")); PARSER.declareString(SamlPrepareAuthenticationRequest::setRealmName, new ParseField("realm")); + PARSER.declareString(SamlPrepareAuthenticationRequest::setRelayState, new ParseField("relay_state")); } public RestSamlPrepareAuthenticationAction(Settings settings, RestController controller, XPackLicenseState licenseState) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/SecurityCachePermissionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/SecurityCachePermissionTests.java index ffef039484e3..c15b2b580f9b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/SecurityCachePermissionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/SecurityCachePermissionTests.java @@ -53,7 +53,7 @@ public class SecurityCachePermissionTests extends SecurityIntegTestCase { public void testThatTermsFilterQueryDoesntLeakData() { SearchResponse response = client().prepareSearch("data").setQuery(QueryBuilders.constantScoreQuery( - QueryBuilders.termsLookupQuery("token", new TermsLookup("tokens", "tokens", "1", "tokens")))) + QueryBuilders.termsLookupQuery("token", new TermsLookup("tokens", "1", "tokens")))) .execute().actionGet(); assertThat(response.isTimedOut(), is(false)); assertThat(response.getHits().getHits().length, is(1)); @@ -64,7 +64,7 @@ public class SecurityCachePermissionTests extends SecurityIntegTestCase { SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING))) .prepareSearch("data") .setQuery(QueryBuilders.constantScoreQuery( - QueryBuilders.termsLookupQuery("token", new TermsLookup("tokens", "tokens", "1", 
"tokens")))) + QueryBuilders.termsLookupQuery("token", new TermsLookup("tokens", "1", "tokens")))) .execute().actionGet(); fail("search phase exception should have been thrown! response was:\n" + response.toString()); } catch (ElasticsearchSecurityException e) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java index 37be275f3615..3d4dcd50e2a6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -119,7 +119,7 @@ public class LicensingTests extends SecurityIntegTestCase { } @Before - public void resetLicensing() throws InterruptedException { + public void resetLicensing() throws Exception { enableLicensing(OperationMode.MISSING); } @@ -236,64 +236,50 @@ public class LicensingTests extends SecurityIntegTestCase { assertThat(ee.status(), is(RestStatus.FORBIDDEN)); } - private void disableLicensing() throws InterruptedException { + private void disableLicensing() throws Exception { // This method first makes sure licensing is enabled everywhere so that we can execute // monitoring actions to ensure we have a stable cluster and only then do we disable. - // This is done in an await busy since there is a chance that the enabling of the license + // This is done in an assertBusy since there is a chance that the enabling of the license // is overwritten by some other cluster activity and the node throws an exception while we // wait for things to stabilize! - final boolean success = awaitBusy(() -> { - try { - for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { - if (licenseState.isAuthAllowed() == false) { - enableLicensing(OperationMode.BASIC); - break; - } + assertBusy(() -> { + for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { + if (licenseState.isAuthAllowed() == false) { + enableLicensing(OperationMode.BASIC); + break; } - - ensureGreen(); - ensureClusterSizeConsistency(); - ensureClusterStateConsistency(); - - // apply the disabling of the license once the cluster is stable - for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { - licenseState.update(OperationMode.BASIC, false, null); - } - } catch (Exception e) { - logger.error("Caught exception while disabling license", e); - return false; } - return true; + + ensureGreen(); + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + + // apply the disabling of the license once the cluster is stable + for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { + licenseState.update(OperationMode.BASIC, false, null); + } }, 30L, TimeUnit.SECONDS); - assertTrue(success); } - private void enableLicensing(License.OperationMode operationMode) throws InterruptedException { + private void enableLicensing(License.OperationMode operationMode) throws Exception { // do this in an await busy since there is a chance that the enabling of the license is // overwritten by some other cluster activity and the node throws an exception while we // wait for things to stabilize! 
- final boolean success = awaitBusy(() -> { - try { - // first update the license so we can execute monitoring actions - for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { - licenseState.update(operationMode, true, null); - } - - ensureGreen(); - ensureClusterSizeConsistency(); - ensureClusterStateConsistency(); - - // re-apply the update in case any node received an updated cluster state that triggered the license state - // to change - for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { - licenseState.update(operationMode, true, null); - } - } catch (Exception e) { - logger.error("Caught exception while enabling license", e); - return false; + assertBusy(() -> { + // first update the license so we can execute monitoring actions + for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { + licenseState.update(operationMode, true, null); + } + + ensureGreen(); + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + + // re-apply the update in case any node received an updated cluster state that triggered the license state + // to change + for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) { + licenseState.update(operationMode, true, null); } - return true; }, 30L, TimeUnit.SECONDS); - assertTrue(success); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 4d8dca8e095a..364b32f18b35 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -128,7 +128,8 @@ public class SecurityTests extends ESTestCase { Client client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); - return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), mock(ScriptService.class)); + return security.createComponents(client, threadPool, clusterService, mock(ResourceWatcherService.class), mock(ScriptService.class), + xContentRegistry()); } private static T findComponent(Class type, Collection components) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/PutRoleBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/PutRoleBuilderTests.java index ba305e15ed76..33fe3259b3cb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/PutRoleBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/PutRoleBuilderTests.java @@ -32,4 +32,4 @@ public class PutRoleBuilderTests extends ESTestCase { "[role1], use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead")); } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 3cbb7782688e..036ac75459b6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -5,10 +5,21 @@ */ package org.elasticsearch.xpack.security.action.role; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.join.query.HasChildQueryBuilder; +import org.elasticsearch.join.query.HasParentQueryBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -23,6 +34,7 @@ import org.mockito.stubbing.Answer; import java.util.ArrayList; import java.util.Collections; +import java.util.List; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsString; @@ -41,12 +53,26 @@ import static org.mockito.Mockito.verifyZeroInteractions; public class TransportPutRoleActionTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(List.of( + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(MatchAllQueryBuilder.NAME), + (p, c) -> MatchAllQueryBuilder.fromXContent(p)), + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(HasChildQueryBuilder.NAME), + (p, c) -> HasChildQueryBuilder.fromXContent(p)), + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(HasParentQueryBuilder.NAME), + (p, c) -> HasParentQueryBuilder.fromXContent(p)), + new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(TermQueryBuilder.NAME), + (p, c) -> TermQueryBuilder.fromXContent(p)))); + } + public void testReservedRole() { final String roleName = randomFrom(new ArrayList<>(ReservedRolesStore.names())); NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -76,7 +102,8 @@ public class TransportPutRoleActionTests extends ESTestCase { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); final boolean created = randomBoolean(); PutRoleRequest 
request = new PutRoleRequest(); @@ -119,7 +146,8 @@ public class TransportPutRoleActionTests extends ESTestCase { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -154,4 +182,78 @@ public class TransportPutRoleActionTests extends ESTestCase { assertThat(throwableRef.get(), is(sameInstance(e))); verify(rolesStore, times(1)).putRole(eq(request), any(RoleDescriptor.class), any(ActionListener.class)); } + + public void testCreationOfRoleWithMalformedQueryJsonFails() { + NativeRolesStore rolesStore = mock(NativeRolesStore.class); + TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); + PutRoleRequest request = new PutRoleRequest(); + request.name("test"); + String[] malformedQueryJson = new String[]{"{ \"match_all\": { \"unknown_field\": \"\" } }", + "{ malformed JSON }", + "{ \"unknown\": {\"\"} }", + "{}"}; + BytesReference query = new BytesArray(randomFrom(malformedQueryJson)); + request.addIndex(new String[]{"idx1"}, new String[]{"read"}, null, null, query, randomBoolean()); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + action.doExecute(mock(Task.class), request, new ActionListener() { + @Override + public void onResponse(PutRoleResponse response) { + responseRef.set(response); + } + + @Override + public void onFailure(Exception e) { + throwableRef.set(e); + } + }); + + assertThat(responseRef.get(), is(nullValue())); + assertThat(throwableRef.get(), is(notNullValue())); + Throwable t = throwableRef.get(); + assertThat(t, instanceOf(ElasticsearchParseException.class)); + assertThat(t.getMessage(), containsString("failed to parse field 'query' for indices [" + + Strings.arrayToCommaDelimitedString(new String[]{"idx1"}) + + "] at index privilege [0] of role descriptor")); + } + + public void testCreationOfRoleWithUnsupportedQueryFails() throws Exception { + NativeRolesStore rolesStore = mock(NativeRolesStore.class); + TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService, + xContentRegistry()); + PutRoleRequest request = new PutRoleRequest(); + request.name("test"); + String hasChildQuery = "{ \"has_child\": { \"type\": \"child\", \"query\": { \"match_all\": {} } } }"; + String hasParentQuery = "{ \"has_parent\": { \"parent_type\": \"parent\", \"query\": { \"match_all\": {} } } }"; + BytesReference query = new BytesArray(randomFrom(hasChildQuery, hasParentQuery)); + request.addIndex(new String[]{"idx1"}, new String[]{"read"}, null, null, query, randomBoolean()); + 
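Both negative tests in this hunk lean on the `xContentRegistry()` override at the top of the class: only the `match_all`, `term`, `has_child` and `has_parent` parsers are registered, so the unsupported queries parse successfully and are then rejected by validation, while genuinely malformed JSON fails at parse time. The registry itself is a closed name-to-parser table that hard-fails on unknown names; schematically (a hypothetical `NamedParsers` type, not the real `NamedXContentRegistry`):

    import java.util.Map;
    import java.util.function.Function;

    final class NamedParsers<T> {

        private final Map<String, Function<String, T>> parsers;

        NamedParsers(Map<String, Function<String, T>> parsers) {
            this.parsers = Map.copyOf(parsers); // closed set: nothing can be added later
        }

        // Look up the parser registered under the given name; unknown names
        // are a hard error, mirroring the "unknown query" parse failures the
        // tests above rely on.
        T parse(String name, String payload) {
            Function<String, T> parser = parsers.get(name);
            if (parser == null) {
                throw new IllegalArgumentException("unknown named object [" + name + "]");
            }
            return parser.apply(payload);
        }
    }
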
+ final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + action.doExecute(mock(Task.class), request, new ActionListener() { + @Override + public void onResponse(PutRoleResponse response) { + responseRef.set(response); + } + + @Override + public void onFailure(Exception e) { + throwableRef.set(e); + } + }); + + assertThat(responseRef.get(), is(nullValue())); + assertThat(throwableRef.get(), is(notNullValue())); + Throwable t = throwableRef.get(); + assertThat(t, instanceOf(ElasticsearchParseException.class)); + assertThat(t.getMessage(), containsString("failed to parse field 'query' for indices [" + + Strings.arrayToCommaDelimitedString(new String[]{"idx1"}) + + "] at index privilege [0] of role descriptor")); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java index c61021515203..20138dc9a79b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/SamlPrepareAuthenticationRequestTests.java @@ -18,6 +18,7 @@ public class SamlPrepareAuthenticationRequestTests extends SamlTestCase { final SamlPrepareAuthenticationRequest req = new SamlPrepareAuthenticationRequest(); req.setRealmName("saml1"); req.setAssertionConsumerServiceURL("https://sp.example.com/sso/saml2/post"); + req.setRelayState("the_relay_state"); serialiseAndValidate(req); } @@ -25,6 +26,7 @@ public class SamlPrepareAuthenticationRequestTests extends SamlTestCase { final SamlPrepareAuthenticationRequest req = new SamlPrepareAuthenticationRequest(); req.setRealmName(null); req.setAssertionConsumerServiceURL(null); + req.setRelayState(null); serialiseAndValidate(req); } @@ -36,7 +38,8 @@ public class SamlPrepareAuthenticationRequestTests extends SamlTestCase { assertThat(req2.getRealmName(), Matchers.equalTo(req1.getRealmName())); assertThat(req2.getAssertionConsumerServiceURL(), Matchers.equalTo(req1.getAssertionConsumerServiceURL())); + assertThat(req2.getRelayState(), Matchers.equalTo(req1.getRelayState())); assertThat(req2.getParentTask(), Matchers.equalTo(req1.getParentTask())); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java index 0b9de2da3328..612437a4b992 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/HasPrivilegesRequestBuilderTests.java @@ -116,4 +116,4 @@ public class HasPrivilegesRequestBuilderTests extends ESTestCase { ); assertThat(parseException.getMessage(), containsString("[cluster,index,applications] are missing")); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java new file mode 100644 index 
000000000000..7cd2ed7a2f73 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationField; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authz.AuthorizationService; +import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; +import org.junit.After; +import org.junit.Before; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransportHasPrivilegesActionTests extends ESTestCase { + private ThreadPool threadPool; + + @Before + public void createThreadPool() { + threadPool = new TestThreadPool("has privileges action tests"); + } + + @After + public void stopThreadPool() { + terminate(threadPool); + } + + public void testHasPrivilegesRequestDoesNotAllowDLSRoleQueryBasedIndicesPrivileges() { + final ThreadContext threadContext = threadPool.getThreadContext(); + final SecurityContext context = mock(SecurityContext.class); + final User user = new User("user-1", "superuser"); + final Authentication authentication = new Authentication(user, + new Authentication.RealmRef("native", "default_native", "node1"), null); + when(context.getAuthentication()).thenReturn(authentication); + threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); + final TransportHasPrivilegesAction transportHasPrivilegesAction = new TransportHasPrivilegesAction(threadPool, + mock(TransportService.class), mock(ActionFilters.class), mock(AuthorizationService.class), mock(NativePrivilegeStore.class), + context, xContentRegistry()); + + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + final RoleDescriptor.IndicesPrivileges[] indicesPrivileges = new RoleDescriptor.IndicesPrivileges[randomIntBetween(1, 5)]; + for (int i = 0; i < indicesPrivileges.length; i++) { + indicesPrivileges[i] = RoleDescriptor.IndicesPrivileges.builder() + .privileges(randomFrom("read", "write")) + .indices(randomAlphaOfLengthBetween(2, 8)) + .query(new BytesArray(randomAlphaOfLength(5))) + .build(); + } + request.indexPrivileges(indicesPrivileges); + request.username("user-1"); + + final PlainActionFuture listener = new 
PlainActionFuture<>(); + transportHasPrivilegesAction.doExecute(mock(Task.class), request, listener); + + final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> listener.actionGet()); + assertThat(ile, notNullValue()); + assertThat(ile.getMessage(), containsString("users may only check the index privileges without any DLS role query")); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 4c9e944c14f0..cc2aedd0205e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -83,7 +83,7 @@ public class ApiKeyIntegTests extends SecurityIntegTestCase { } @After - public void wipeSecurityIndex() throws InterruptedException { + public void wipeSecurityIndex() throws Exception { // get the api key service and wait until api key expiration is not in progress! awaitApiKeysRemoverCompletion(); deleteSecurityIndex(); @@ -114,10 +114,9 @@ public class ApiKeyIntegTests extends SecurityIntegTestCase { "manage_own_api_key_role:user_with_manage_own_api_key_role\n"; } - private void awaitApiKeysRemoverCompletion() throws InterruptedException { + private void awaitApiKeysRemoverCompletion() throws Exception { for (ApiKeyService apiKeyService : internalCluster().getInstances(ApiKeyService.class)) { - final boolean done = awaitBusy(() -> apiKeyService.isExpirationInProgress() == false); - assertTrue(done); + assertBusy(() -> assertFalse(apiKeyService.isExpirationInProgress())); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index 59bd9f04fedf..98f09ef631e9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -464,11 +464,10 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { } @After - public void wipeSecurityIndex() throws InterruptedException { + public void wipeSecurityIndex() throws Exception { // get the token service and wait until token expiration is not in progress! 
for (TokenService tokenService : internalCluster().getInstances(TokenService.class)) { - final boolean done = awaitBusy(() -> tokenService.isExpirationInProgress() == false); - assertTrue(done); + assertBusy(() -> assertFalse(tokenService.isExpirationInProgress())); } super.deleteSecurityIndex(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 64d6cfd938f8..1975c9c12ac6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -202,7 +202,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { .cluster("all", "none") .runAs("root", "nobody") .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, - new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean()) + new BytesArray("{\"match_all\": {}}"), randomBoolean()) .metadata(metadata) .get(); logger.error("--> waiting for .security index"); @@ -219,13 +219,13 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { .cluster("all", "none") .runAs("root", "nobody") .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, - new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean()) + new BytesArray("{\"match_all\": {}}"), randomBoolean()) .get(); preparePutRole("test_role3") .cluster("all", "none") .runAs("root", "nobody") .addIndices(new String[]{"index"}, new String[]{"read"}, new String[]{"body", "title"}, null, - new BytesArray("{\"query\": {\"match_all\": {}}}"), randomBoolean()) + new BytesArray("{\"match_all\": {}}"), randomBoolean()) .get(); logger.info("--> retrieving all roles"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java index 4a923272371b..adb4fc58d3ed 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java @@ -12,19 +12,15 @@ import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.DeprecationHandler; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import 
org.elasticsearch.env.TestEnvironment; import org.elasticsearch.protocol.xpack.XPackInfoResponse; @@ -443,21 +439,6 @@ public class SetupPasswordToolTests extends CommandTestCase { } } - private String parsePassword(String value) throws IOException { - try (XContentParser parser = JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, value)) { - XContentParser.Token token = parser.nextToken(); - if (token == XContentParser.Token.START_OBJECT) { - if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - if (parser.nextToken() == XContentParser.Token.VALUE_STRING) { - return parser.text(); - } - } - } - } - throw new RuntimeException("Did not properly parse password."); - } - private URL authenticateUrl(URL url) throws MalformedURLException, URISyntaxException { return new URL(url, (url.toURI().getPath() + "/_security/_authenticate").replaceAll("/+", "/") + "?pretty"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java index ff080be728db..7c80de3ace4f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/SessionFactoryLoadBalancingTests.java @@ -320,7 +320,7 @@ public class SessionFactoryLoadBalancingTests extends LdapTestCase { final List<Socket> openedSockets = new ArrayList<>(); final List<InetAddress> blacklistedAddress = new ArrayList<>(); try { - final boolean allSocketsOpened = awaitBusy(() -> { + final boolean allSocketsOpened = waitUntil(() -> { try { InetAddress[] allAddresses = InetAddressHelper.getAllAddresses(); if (serverAddress instanceof Inet4Address) { @@ -337,10 +337,7 @@ public class SessionFactoryLoadBalancingTests extends LdapTestCase { final Socket socket = openMockSocket(serverAddress, serverPort, localAddress, portToBind); openedSockets.add(socket); logger.debug("opened socket [{}]", socket); - } catch (NoRouteToHostException e) { - logger.debug(new ParameterizedMessage("blacklisting address [{}] due to:", localAddress), e); - blacklistedAddress.add(localAddress); - } catch (ConnectException e) { + } catch (NoRouteToHostException | ConnectException e) { logger.debug(new ParameterizedMessage("blacklisting address [{}] due to:", localAddress), e); blacklistedAddress.add(localAddress); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index 711ca517d98c..a6216ea2665f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -37,6 +37,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { ("test123".toCharArray()))); return super.configUsers() + "create_only:" + usersPasswdHashed + "\n" + + "all_on_test:" + usersPasswdHashed + "\n" + "create_test_aliases_test:" + usersPasswdHashed + "\n" + "create_test_aliases_alias:" + usersPasswdHashed + "\n" + "create_test_aliases_test_alias:" + usersPasswdHashed + "\n" + @@ -47,6 +48,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase {
protected String configUsersRoles() { return super.configUsersRoles() + "create_only:create_only\n" + + "all_on_test:all_on_test\n" + "create_test_aliases_test:create_test_aliases_test\n" + "create_test_aliases_alias:create_test_aliases_alias\n" + "create_test_aliases_test_alias:create_test_aliases_test_alias\n" + @@ -61,6 +63,10 @@ public class IndexAliasesTests extends SecurityIntegTestCase { " indices:\n" + " - names: '*'\n" + " privileges: [ create_index ]\n" + + "all_on_test:\n" + + " indices:\n" + + " - names: 'test_*'\n" + + " privileges: [ all ]\n" + //role that has create index and manage_aliases on test_*, not enough to manage_aliases aliases outside of test_* namespace "create_test_aliases_test:\n" + " indices:\n" + @@ -89,23 +95,23 @@ public class IndexAliasesTests extends SecurityIntegTestCase { @Before public void createBogusIndex() { - if (randomBoolean()) { - //randomly create an index with two aliases from user admin, to make sure it doesn't affect any of the test results - assertAcked(client().admin().indices().prepareCreate("index1").addAlias(new Alias("alias1")).addAlias(new Alias("alias2"))); - } + //randomly create an index with two aliases from user admin, to make sure it doesn't affect any of the test results + assertAcked(client().admin().indices().prepareCreate("bogus_index_1").addAlias(new Alias("bogus_alias_1")) + .addAlias(new Alias("bogus_alias_2"))); } public void testCreateIndexThenAliasesCreateOnlyPermission() { //user has create permission only: allows to create indices, manage_aliases is required to add/remove aliases Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecureString("test123".toCharArray()))); - assertAcked(client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").get()); + final Client client = client().filterWithHeader(headers); + assertAcked(client.admin().indices().prepareCreate("test_1").get()); assertThrowsAuthorizationException( - client().filterWithHeader(headers).admin().indices().prepareAliases().addAlias("test_1", "test_alias")::get, + client.admin().indices().prepareAliases().addAlias("test_1", "test_alias")::get, IndicesAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .addAlias("test_*", "test_alias")::get, IndicesAliasesAction.NAME, "create_only"); } @@ -116,7 +122,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { new SecureString("test123".toCharArray()))); assertThrowsAuthorizationException( - client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").addAlias(new Alias("test_2"))::get, + client(headers).admin().indices().prepareCreate("test_1").addAlias(new Alias("test_2"))::get, IndicesAliasesAction.NAME, "create_only"); } @@ -124,15 +130,16 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //user has create permission only: allows to create indices, manage_aliases is required to add/remove aliases Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecureString("test123".toCharArray()))); + final Client client = client().filterWithHeader(headers); assertThrowsAuthorizationException( - client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "alias_1")::get, + client.admin().indices().prepareAliases().removeAlias("test_1", "alias_1")::get, 
IndicesAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .removeAlias("test_1", "alias_*")::get, IndicesAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .removeAlias("test_1", "_all")::get, IndicesAliasesAction.NAME, "create_only"); } @@ -140,24 +147,25 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //user has create permission only: allows to create indices, manage_aliases is required to retrieve aliases though Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecureString("test123".toCharArray()))); + final Client client = client().filterWithHeader(headers); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_1") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("test_1") .setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers) + assertThrowsAuthorizationException(client .admin().indices().prepareGetAliases("_all") .setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices() + assertThrowsAuthorizationException(client.admin().indices() .prepareGetAliases().setIndices("test_1").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_alias") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("test_alias") .setIndices("test_*").setIndicesOptions(IndicesOptions.strictExpand())::get, GetAliasesAction.NAME, "create_only"); //this throws exception no matter what the indices options are because the aliases part cannot be resolved to any alias //and there is no way to "allow_no_aliases" like we can do with indices. 
- assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases()::get, + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases()::get, GetAliasesAction.NAME, "create_only"); } @@ -165,23 +173,30 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //user has create permission only: allows to create indices, manage_aliases is required to retrieve aliases though Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_only", new SecureString("test123".toCharArray()))); + final Client client = client().filterWithHeader(headers); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_1") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("test_1") .setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases("_all") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("_all") .setIndices("test_1").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareGetAliases().setIndices("test_1") + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("alias*")::get, GetAliasesAction.NAME, "create_only"); + + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases().setIndices("test_1") .setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); assertThrowsAuthorizationException( - client().filterWithHeader(headers).admin().indices().prepareGetAliases("test_alias") + client.admin().indices().prepareGetAliases("test_alias") + .setIndices("test_*").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); + + assertThrowsAuthorizationException( + client.admin().indices().prepareGetAliases() .setIndices("test_*").setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); //this throws exception no matter what the indices options are because the aliases part cannot be resolved to any alias //and there is no way to "allow_no_aliases" like we can do with indices. 
- assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices() + assertThrowsAuthorizationException(client.admin().indices() .prepareGetAliases().setIndicesOptions(IndicesOptions.lenientExpandOpen())::get, GetAliasesAction.NAME, "create_only"); } @@ -190,17 +205,18 @@ public class IndexAliasesTests extends SecurityIntegTestCase { // indices Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test", new SecureString("test123".toCharArray()))); + final Client client = client().filterWithHeader(headers); - assertAcked(client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").get()); + assertAcked(client.admin().indices().prepareCreate("test_1").get()); //ok: user has manage_aliases on test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().addAlias("test_1", "test_alias").get()); + assertAcked(client.admin().indices().prepareAliases().addAlias("test_1", "test_alias").get()); //ok: user has manage_aliases on test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().addAlias("test_*", "test_alias_2").get()); + assertAcked(client.admin().indices().prepareAliases().addAlias("test_*", "test_alias_2").get()); //fails: user doesn't have manage_aliases on alias_1 - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .addAlias("test_1", "alias_1").addAlias("test_1", "test_alias")::get, IndicesAliasesAction.NAME, "create_test_aliases_test"); } @@ -211,10 +227,12 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //ok: user has manage_aliases on test_* Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test", new SecureString("test123".toCharArray()))); - assertAcked(client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).get()); + final Client client = client(headers); + + assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).get()); //fails: user doesn't have manage_aliases on alias_1 - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareCreate("test_2") + assertThrowsAuthorizationException(client.admin().indices().prepareCreate("test_2") .addAlias(new Alias("test_alias")).addAlias(new Alias("alias_2"))::get, IndicesAliasesAction.NAME, "create_test_aliases_test"); } @@ -225,38 +243,49 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //ok: user has manage_aliases on test_* Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test", new SecureString("test123".toCharArray()))); + final Client client = client(headers); - assertAcked(client().filterWithHeader(headers).admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias_1")) + assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias_1")) .addAlias(new Alias("test_alias_2")) .addAlias(new Alias("test_alias_3")).addAlias(new Alias("test_alias_4")).get()); //ok: user has manage_aliases on test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_1").get()); + assertAcked(client.admin().indices().prepareAliases().removeAlias("test_1", "test_alias_1").get()); //ok: user has manage_aliases on 
test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_*", "test_alias_2").get()); + assertAcked(client.admin().indices().prepareAliases().removeAlias("test_*", "test_alias_2").get()); //ok: user has manage_aliases on test_* - assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*").get()); + assertAcked(client.admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*").get()); { //fails: all aliases have been deleted, no existing aliases match test_alias_* AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, - client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*")::get); + client.admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*")::get); assertThat(exception.getMessage(), equalTo("aliases [test_alias_*] missing")); } { //fails: all aliases have been deleted, no existing aliases match _all AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, - client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); + client.admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); assertThat(exception.getMessage(), equalTo("aliases [_all] missing")); } + // add unauthorized aliases + if (randomBoolean()) { + assertAcked(client().admin().indices().prepareAliases().addAlias("test_1", "alias_1").get()); + } + assertAcked(client().admin().indices().prepareAliases().addAlias("test_1", "alias_2").get()); + //fails: user doesn't have manage_aliases on alias_1 - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .removeAlias("test_1", "alias_1")::get, IndicesAliasesAction.NAME, "create_test_aliases_test"); //fails: user doesn't have manage_aliases on alias_1 - assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() + assertThrowsAuthorizationException(client.admin().indices().prepareAliases() .removeAlias("test_1", new String[]{"_all", "alias_1"})::get, IndicesAliasesAction.NAME, "create_test_aliases_test"); + + AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, + client.admin().indices().prepareAliases().removeAlias("test_1", "*")::get); + assertThat(exception.getMessage(), equalTo("aliases [*] missing")); } public void testGetAliasesCreateAndAliasesPermission() { @@ -264,7 +293,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { // indices Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).get()); //ok: user has manage_aliases on test_* @@ -315,7 +344,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testCreateIndexThenAliasesCreateAndAliasesPermission2() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* 
and manage_aliases permission on alias_*. manage_aliases is required to add/remove aliases // on both aliases and indices @@ -339,7 +368,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on alias_*. manage_aliases is required to add/remove aliases // on both aliases and indices @@ -352,7 +381,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testDeleteAliasesCreateAndAliasesPermission2() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on alias_*. manage_aliases is required to add/remove aliases // on both aliases and indices @@ -368,7 +397,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testGetAliasesCreateAndAliasesPermission2() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on alias_*. manage_aliases is required to retrieve aliases // on both aliases and indices @@ -413,7 +442,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testCreateIndexThenAliasesCreateAndAliasesPermission3() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on test_*,alias_*. All good. assertAcked(client.admin().indices().prepareCreate("test_1")); @@ -428,7 +457,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testCreateIndexAndAliasesCreateAndAliasesPermission3() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on test_*,alias_*. All good. assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias"))); @@ -439,7 +468,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testDeleteAliasesCreateAndAliasesPermission3() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on test_*,alias_*. All good. 
assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).addAlias(new Alias("alias_1")) @@ -462,7 +491,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testGetAliasesCreateAndAliasesPermission3() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("create_test_aliases_test_alias", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has create permission on test_* and manage_aliases permission on test_*,alias_*. All good. assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias")).addAlias(new Alias("alias_1"))); @@ -503,7 +532,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testGetAliasesAliasesOnlyPermissionStrict() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("aliases_only", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has manage_aliases only permissions on both alias_* and test_* //security plugin lets it through, but es core intercepts it due to strict indices options and throws index not found @@ -523,7 +552,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { public void testGetAliasesAliasesOnlyPermissionIgnoreUnavailable() { Map headers = Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("aliases_only", new SecureString("test123".toCharArray()))); - final Client client = client().filterWithHeader(headers); + final Client client = client(headers); //user has manage_aliases only permissions on both alias_* and test_* //ok: manage_aliases on both test_* and alias_* @@ -542,6 +571,37 @@ public class IndexAliasesTests extends SecurityIntegTestCase { assertEquals(0, getAliasesResponse.getAliases().size()); } + public void testRemoveIndex() { + final Map headers = Collections.singletonMap( + BASIC_AUTH_HEADER, + basicAuthHeaderValue("all_on_test", new SecureString("test123".toCharArray()))); + final Client client = client(headers); + + assertAcked(client.admin().indices().prepareCreate("test_delete_1").get()); + assertAcked(client.admin().indices().prepareCreate("test_1").addAlias(new Alias("test_alias_1"))); + + assertAcked(client.admin().indices().prepareAliases().removeIndex("test_delete_*").get()); + assertAliases(client.admin().indices().prepareGetAliases().setAliases("*"), "test_1", "test_alias_1"); + + // test that the remove index wildcard expacnds only to authorized indices + assertAcked(client.admin().indices().prepareAliases().removeIndex("*").get()); + GetAliasesResponse getAliasesResponse = client.admin().indices().prepareGetAliases().setAliases("*").get(); + assertThat(getAliasesResponse.getAliases().size(), equalTo(0)); + assertAliases(client().admin().indices().prepareGetAliases().setAliases("*"), "bogus_index_1", "bogus_alias_1", "bogus_alias_2"); + } + + private static Client client(final Map headers) { + // it should not matter what client we send the request to, but let's pin all requests to a specific node + final Client client; + if (internalCluster().numDataAndMasterNodes() == 1 || randomBoolean()) { + client = client(internalCluster().getMasterName()).filterWithHeader(headers); + } else { + client = client(randomValueOtherThan(internalCluster().getMasterName(), () -> randomFrom(internalCluster().getNodeNames()))) + 
.filterWithHeader(headers); + } + return client; + } + private static void assertAliases(GetAliasesRequestBuilder getAliasesRequestBuilder, String index, String... aliases) { GetAliasesResponse getAliasesResponse = getAliasesRequestBuilder.get(); assertThat(getAliasesResponse.getAliases().size(), equalTo(1)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index c2db3ba7352f..e79f6bea966b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -45,7 +45,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; @@ -81,6 +81,7 @@ import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -194,7 +195,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testDashIndicesAreAllowedInShardLevelRequests() { //indices with names starting with '-' or '+' can be created up to version 2.x and can be around in 5.x //aliases with names starting with '-' or '+' can be created up to version 5.x and can be around in 6.x - ShardSearchTransportRequest request = mock(ShardSearchTransportRequest.class); + ShardSearchRequest request = mock(ShardSearchRequest.class); when(request.indices()).thenReturn(new String[]{"-index10", "-index20", "+index30"}); List<String> indices = resolveIndices(request, buildAuthorizedIndices(userDashIndices, SearchAction.NAME)) .getLocal(); @@ -204,7 +205,7 @@ } public void testWildcardsAreNotAllowedInShardLevelRequests() { - ShardSearchTransportRequest request = mock(ShardSearchTransportRequest.class); + ShardSearchRequest request = mock(ShardSearchRequest.class); when(request.indices()).thenReturn(new String[]{"index*"}); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () -> resolveIndices(request, buildAuthorizedIndices(userDashIndices, SearchAction.NAME)) @@ -214,7 +215,7 @@ } public void testAllIsNotAllowedInShardLevelRequests() { - ShardSearchTransportRequest request = mock(ShardSearchTransportRequest.class); + ShardSearchRequest request = mock(ShardSearchRequest.class); if (randomBoolean()) { when(request.indices()).thenReturn(new String[]{"_all"}); } else { @@ -775,14 +776,24 @@ assertThat(request.getAliasActions().get(1).aliases(),
arrayContainingInAnyOrder("foofoobar", "foobarfoo", "explicit")); } - public void testResolveAliasesWildcardsIndicesAliasesRequestDeleteActionsNoAuthorizedIndices() { + public void testResolveAliasesWildcardsIndicesAliasesRequestRemoveAliasActionsNoAuthorizedIndices() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("foo*").alias("foo*")); - //no authorized aliases match bar*, hence aliases are replaced with no-aliases-expression for that action request.addAliasAction(AliasActions.remove().index("*bar").alias("bar*")); resolveIndices(request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME)); assertThat(request.getAliasActions().get(0).aliases(), arrayContainingInAnyOrder("foofoobar", "foobarfoo")); - assertThat(request.getAliasActions().get(1).aliases(), arrayContaining(IndicesAndAliasesResolver.NO_INDICES_OR_ALIASES_ARRAY)); + assertThat(request.getAliasActions().get(1).aliases(), arrayContaining("*", "-*")); + } + + public void testResolveAliasesWildcardsIndicesAliasesRequestRemoveIndexActions() { + IndicesAliasesRequest request = new IndicesAliasesRequest(); + request.addAliasAction(AliasActions.removeIndex().index("foo*")); + request.addAliasAction(AliasActions.removeIndex().index("*bar")); + resolveIndices(request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME)); + assertThat(request.getAliasActions().get(0).indices(), arrayContainingInAnyOrder("foofoo")); + assertThat(request.getAliasActions().get(0).aliases(), emptyArray()); + assertThat(request.getAliasActions().get(1).indices(), arrayContainingInAnyOrder("bar")); + assertThat(request.getAliasActions().get(1).aliases(), emptyArray()); } public void testResolveWildcardsIndicesAliasesRequestAddAndDeleteActions() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java index cab4e660512c..0c20a7c20d09 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java @@ -61,7 +61,7 @@ public class RoleDescriptorTests extends ESTestCase { .indices("i1", "i2") .privileges("read") .grantedFields("body", "title") - .query("{\"query\": {\"match_all\": {}}}") + .query("{\"match_all\": {}}") .build() }; final RoleDescriptor.ApplicationResourcePrivileges[] applicationPrivileges = { @@ -82,7 +82,7 @@ public class RoleDescriptorTests extends ESTestCase { assertThat(descriptor.toString(), is("Role[name=test, cluster=[all,none]" + ", global=[{APPLICATION:manage:applications=app01,app02}]" + ", indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], allowRestrictedIndices=[false], privileges=[read]" + - ", field_security=[grant=[body,title], except=null], query={\"query\": {\"match_all\": {}}}],]" + + ", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" + ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" + ", runAs=[sudo], metadata=[{}]]")); } @@ -94,7 +94,7 @@ public class RoleDescriptorTests extends ESTestCase { .privileges("read") .grantedFields("body", "title") .allowRestrictedIndices(randomBoolean()) - .query("{\"query\": {\"match_all\": {}}}") + .query("{\"match_all\": {}}") .build() }; final RoleDescriptor.ApplicationResourcePrivileges[] 
applicationPrivileges = { @@ -136,7 +136,7 @@ public class RoleDescriptorTests extends ESTestCase { "\"p2\"]}, {\"names\": \"idx2\", \"allow_restricted_indices\": true, \"privileges\": [\"p3\"], \"field_security\": " + "{\"grant\": [\"f1\", \"f2\"]}}, {\"names\": " + "\"idx2\", \"allow_restricted_indices\": false," + - "\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": \"{\\\"match_all\\\": {}}\"}]}"; + "\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": {\"match_all\": {}} }]}"; rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); assertEquals("test", rd.getName()); assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); @@ -261,6 +261,18 @@ public class RoleDescriptorTests extends ESTestCase { assertNull(rd.getIndicesPrivileges()[0].getQuery()); } + public void testParseNullQuery() throws Exception { + String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"index\": [{\"names\": [\"idx1\",\"idx2\"], " + + "\"privileges\": [\"p1\", \"p2\"], \"query\": null}]}"; + RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON); + assertEquals("test", rd.getName()); + assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges()); + assertEquals(1, rd.getIndicesPrivileges().length); + assertArrayEquals(new String[] { "idx1", "idx2" }, rd.getIndicesPrivileges()[0].getIndices()); + assertArrayEquals(new String[] { "m", "n" }, rd.getRunAs()); + assertNull(rd.getIndicesPrivileges()[0].getQuery()); + } + public void testParseEmptyQueryUsingDeprecatedIndicesField() throws Exception { String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": [\"idx1\",\"idx2\"], " + "\"privileges\": [\"p1\", \"p2\"], \"query\": \"\"}]}"; @@ -283,4 +295,5 @@ public class RoleDescriptorTests extends ESTestCase { assertEquals(1, parsed.getTransientMetadata().size()); assertEquals(true, parsed.getTransientMetadata().get("enabled")); } + } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 3a2c30891008..99ae113e15fe 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -9,9 +9,13 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -45,6 +49,7 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.arrayContaining; import static 
org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -61,11 +66,17 @@ import static org.mockito.Mockito.when; public class FileRolesStoreTests extends ESTestCase { + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(singletonList(new NamedXContentRegistry.Entry(QueryBuilder.class, + new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> MatchAllQueryBuilder.fromXContent(p)))); + } + public void testParseFile() throws Exception { Path path = getDataPath("roles.yml"); Map roles = FileRolesStore.parseFile(path, logger, Settings.builder() .put(XPackSettings.DLS_FLS_ENABLED.getKey(), true) - .build(), new XPackLicenseState(Settings.EMPTY)); + .build(), new XPackLicenseState(Settings.EMPTY), xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(9)); @@ -235,6 +246,8 @@ public class FileRolesStoreTests extends ESTestCase { assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); assertThat(group.getQuery(), notNullValue()); + + assertThat(roles.get("role_query_invalid"), nullValue()); } public void testParseFileWithFLSAndDLSDisabled() throws Exception { @@ -244,14 +257,15 @@ public class FileRolesStoreTests extends ESTestCase { events.clear(); Map roles = FileRolesStore.parseFile(path, logger, Settings.builder() .put(XPackSettings.DLS_FLS_ENABLED.getKey(), false) - .build(), new XPackLicenseState(Settings.EMPTY)); + .build(), new XPackLicenseState(Settings.EMPTY), xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(6)); assertThat(roles.get("role_fields"), nullValue()); assertThat(roles.get("role_query"), nullValue()); assertThat(roles.get("role_query_fields"), nullValue()); + assertThat(roles.get("role_query_invalid"), nullValue()); - assertThat(events, hasSize(3)); + assertThat(events, hasSize(4)); assertThat( events.get(0), startsWith("invalid role definition [role_fields] in roles file [" + path.toAbsolutePath() + @@ -262,6 +276,9 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(events.get(2), startsWith("invalid role definition [role_query_fields] in roles file [" + path.toAbsolutePath() + "]. document and field level security is not enabled.")); + assertThat(events.get(3), + startsWith("invalid role definition [role_query_invalid] in roles file [" + path.toAbsolutePath() + + "]. document and field level security is not enabled.")); } public void testParseFileWithFLSAndDLSUnlicensed() throws Exception { @@ -271,7 +288,7 @@ public class FileRolesStoreTests extends ESTestCase { events.clear(); XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(false); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, licenseState, xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(9)); assertNotNull(roles.get("role_fields")); @@ -295,7 +312,8 @@ public class FileRolesStoreTests extends ESTestCase { public void testDefaultRolesFile() throws Exception { // TODO we should add the config dir to the resources so we don't copy this stuff around... 
Path path = getDataPath("default_roles.yml"); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY)); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(0)); } @@ -325,7 +343,7 @@ public class FileRolesStoreTests extends ESTestCase { FileRolesStore store = new FileRolesStore(settings, env, watcherService, roleSet -> { modifiedRoles.addAll(roleSet); latch.countDown(); - }, new XPackLicenseState(Settings.EMPTY)); + }, new XPackLicenseState(Settings.EMPTY), xContentRegistry()); Set descriptors = store.roleDescriptors(Collections.singleton("role1")); assertThat(descriptors, notNullValue()); @@ -368,7 +386,7 @@ public class FileRolesStoreTests extends ESTestCase { store = new FileRolesStore(settings, env, watcherService, roleSet -> { truncatedFileRolesModified.addAll(roleSet); truncateLatch.countDown(); - }, new XPackLicenseState(Settings.EMPTY)); + }, new XPackLicenseState(Settings.EMPTY), xContentRegistry()); final Set allRolesPreTruncate = store.getAllRoleNames(); try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) { @@ -391,7 +409,7 @@ public class FileRolesStoreTests extends ESTestCase { store = new FileRolesStore(settings, env, watcherService, roleSet -> { modifiedFileRolesModified.addAll(roleSet); modifyLatch.countDown(); - }, new XPackLicenseState(Settings.EMPTY)); + }, new XPackLicenseState(Settings.EMPTY), xContentRegistry()); try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) { writer.append("role5:").append(System.lineSeparator()); @@ -416,7 +434,8 @@ public class FileRolesStoreTests extends ESTestCase { public void testThatEmptyFileDoesNotResultInLoop() throws Exception { Path file = createTempFile(); Files.write(file, Collections.singletonList("#"), StandardCharsets.UTF_8); - Map roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY)); + Map roles = FileRolesStore.parseFile(file, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); assertThat(roles.keySet(), is(empty())); } @@ -425,7 +444,8 @@ public class FileRolesStoreTests extends ESTestCase { Logger logger = CapturingLogger.newCapturingLogger(Level.ERROR, null); List entries = CapturingLogger.output(logger.getName(), Level.ERROR); entries.clear(); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY)); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); assertThat(roles.size(), is(1)); assertThat(roles, hasKey("valid_role")); RoleDescriptor descriptor = roles.get("valid_role"); @@ -467,7 +487,8 @@ public class FileRolesStoreTests extends ESTestCase { List events = CapturingLogger.output(logger.getName(), Level.ERROR); events.clear(); Path path = getDataPath("reserved_roles.yml"); - Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY)); + Map roles = FileRolesStore.parseFile(path, logger, Settings.EMPTY, new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); assertThat(roles, notNullValue()); assertThat(roles.size(), is(1)); @@ -498,7 +519,8 @@ public class FileRolesStoreTests extends ESTestCase { 
.put(XPackSettings.DLS_FLS_ENABLED.getKey(), flsDlsEnabled) .build(); Environment env = TestEnvironment.newEnvironment(settings); - FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState(Settings.EMPTY)); + FileRolesStore store = new FileRolesStore(settings, env, mock(ResourceWatcherService.class), new XPackLicenseState(Settings.EMPTY), + xContentRegistry()); Map usageStats = store.usageStats(); @@ -512,9 +534,10 @@ public class FileRolesStoreTests extends ESTestCase { Path path = getDataPath("roles2xformat.yml"); byte[] bytes = Files.readAllBytes(path); String roleString = new String(bytes, Charset.defaultCharset()); - RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY); + RoleDescriptor role = FileRolesStore.parseRoleDescriptor(roleString, path, logger, true, Settings.EMPTY, xContentRegistry()); RoleDescriptor.IndicesPrivileges indicesPrivileges = role.getIndicesPrivileges()[0]; assertThat(indicesPrivileges.getGrantedFields(), arrayContaining("foo", "boo")); assertNull(indicesPrivileges.getDeniedFields()); } + } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index 657a1f47e42d..1ba2ff637f35 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -189,7 +189,7 @@ public class NativePrivilegeStoreTests extends ESTestCase { assertThat(request.indices(), arrayContaining(RestrictedIndicesNames.SECURITY_MAIN_ALIAS)); final String query = Strings.toString(request.source().query()); - assertThat(query, containsString("{\"bool\":{\"filter\":[{\"terms\":{\"application\":[\"yourapp\"]")); + assertThat(query, containsString("{\"bool\":{\"should\":[{\"terms\":{\"application\":[\"yourapp\"]")); assertThat(query, containsString("{\"prefix\":{\"application\":{\"value\":\"myapp-\"")); assertThat(query, containsString("{\"term\":{\"type\":{\"value\":\"application-privilege\"")); @@ -281,7 +281,7 @@ public class NativePrivilegeStoreTests extends ESTestCase { )); } - awaitBusy(() -> requests.size() > 0, 1, TimeUnit.SECONDS); + assertBusy(() -> assertFalse(requests.isEmpty()), 1, TimeUnit.SECONDS); assertThat(requests, iterableWithSize(1)); assertThat(requests.get(0), instanceOf(ClearRolesCacheRequest.class)); @@ -322,7 +322,7 @@ public class NativePrivilegeStoreTests extends ESTestCase { )); } - awaitBusy(() -> requests.size() > 0, 1, TimeUnit.SECONDS); + assertBusy(() -> assertFalse(requests.isEmpty()), 1, TimeUnit.SECONDS); assertThat(requests, iterableWithSize(1)); assertThat(requests.get(0), instanceOf(ClearRolesCacheRequest.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java index 3572eb2a64d0..819dd8d2ecc8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java @@ -88,6 +88,10 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { return builder // invert the require auth settings 
.put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.NONE) + // Due to the TLSv1.3 bug with session resumption when client authentication is not + // used, we need to set the protocols since we disabled client auth for transport + // to avoid failures on pre 11.0.3 JDKs. See #getProtocols + .putList("xpack.security.transport.ssl.supported_protocols", getProtocols()) .put("xpack.security.http.ssl.enabled", true) .put("xpack.security.http.ssl.client_authentication", SSLClientAuth.REQUIRED) .build(); @@ -98,7 +102,6 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { return true; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/46230") public void testThatHttpFailsWithoutSslClientAuth() throws IOException { SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(SSLContexts.createDefault(), NoopHostnameVerifier.INSTANCE); try (RestClient restClient = createRestClient(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy), "https")) { @@ -115,7 +118,6 @@ public class SSLClientAuthTests extends SecurityIntegTestCase { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/46230") public void testThatHttpWorksWithSslClientAuth() throws IOException { SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(getSSLContext(), NoopHostnameVerifier.INSTANCE); try (RestClient restClient = createRestClient(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy), "https")) { diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml index 99459c5f5ec2..c1c8bc4b1d7f 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles.yml @@ -65,8 +65,16 @@ role_query_fields: privileges: - READ query: - match_all: + match_all: {} field_security: grant: - foo - - boo \ No newline at end of file + - boo + +role_query_invalid: + indices: + - names: + - 'query_idx' + privileges: + - READ + query: '{ "unknown": {} }' \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles2xformat.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles2xformat.yml index ebfdce617a01..d0eb7ba4922b 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles2xformat.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/roles2xformat.yml @@ -5,7 +5,7 @@ role1: privileges: - READ query: - match_all: + match_all: {} fields: - foo - boo \ No newline at end of file diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderTests.java index 3e671d588254..fb5aa5778c64 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderTests.java @@ -29,10 +29,10 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.xpack.spatial.SpatialPlugin; import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; @@ -135,7 +135,7 @@ public class ShapeQueryBuilderTests extends AbstractQueryTestCase 0 ORDER BY ly; +SELECT city, ST_X(geoshape) x, ST_Y(geoshape) y, ST_Z(geoshape) z, ST_X(location) lx, ST_Y(location) ly FROM geo WHERE lx > 0 ORDER BY ly; city:s | x:d | y:d | z:d | lx:d | ly:d Sydney |151.208629 |-33.863385 |100.0 |151.20862897485495|-33.863385021686554 diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json index 56007a0284c4..8ef9e39991e3 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/geosql.json @@ -17,12 +17,15 @@ "type": "geo_point", "doc_values": "false" }, - "shape": { + "geoshape": { "type": "geo_shape" }, "region_point": { "type": "keyword" + }, + "shape": { + "type": "shape" } - } + } } } \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql b/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql index b8b8d4e36f45..c736b0b5f4f5 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql +++ b/x-pack/plugin/sql/qa/src/main/resources/geo/setup_test_geo.sql @@ -4,6 +4,7 @@ CREATE TABLE "geo" ( "region" VARCHAR(50), "region_point" VARCHAR(50), "location" POINT, + "geoshape" GEOMETRY, "shape" GEOMETRY ) AS SELECT * FROM CSVREAD('classpath:/geo/geo.csv'); diff --git a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec index f1941161697d..3f51c34d6ea4 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/ogc/ogc.csv-spec @@ -19,18 +19,18 @@ address | VARCHAR | text address.keyword | VARCHAR | keyword aliases | VARCHAR | text aliases.keyword | VARCHAR | keyword -boundary | GEOMETRY | geo_shape -centerline | GEOMETRY | geo_shape -centerlines | GEOMETRY | geo_shape +boundary | GEOMETRY | shape +centerline | GEOMETRY | shape +centerlines | GEOMETRY | shape fid | INTEGER | integer -footprint | GEOMETRY | geo_shape +footprint | GEOMETRY | shape name | VARCHAR | text name.keyword | VARCHAR | keyword -neatline | GEOMETRY | geo_shape +neatline | GEOMETRY | shape num_lanes | INTEGER | integer ogc_type | VARCHAR | keyword -position | GEOMETRY | geo_shape -shore | GEOMETRY | geo_shape -shores | GEOMETRY | geo_shape +position | GEOMETRY | shape +shore | GEOMETRY | shape +shores | GEOMETRY | shape type | VARCHAR | keyword ; diff --git a/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec new file mode 100644 index 000000000000..0baa18765ff6 --- /dev/null +++ b/x-pack/plugin/sql/qa/src/main/resources/pivot.csv-spec @@ -0,0 +1,191 @@ +averageWithOneValue +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')); + + languages | 'F' +---------------+------------------ +null |62140.666666666664 +1 |47073.25 +2 
|50684.4 +3 |53660.0 +4 |49291.5 +5 |46705.555555555555 +; + +averageWithAliasAndOneValue +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) AS "AVG" FOR gender IN ('F')); + + languages | 'F' +---------------+------------------ +null |62140.666666666664 +1 |47073.25 +2 |50684.4 +3 |53660.0 +4 |49291.5 +5 |46705.555555555555 +; + +averageWithAliasedValue +schema::languages:bt|XX:d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F' AS "XX")); + + languages | XX +---------------+------------------ +null |62140.666666666664 +1 |47073.25 +2 |50684.4 +3 |53660.0 +4 |49291.5 +5 |46705.555555555555 +; + +averageWithTwoValues +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')); + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +3 |51741.90909090909|53660.0 +4 |47058.90909090909|49291.5 +5 |39052.875 |46705.555555555555 +; + +averageWithTwoValuesAndAlias +schema::languages:bt|XY:d|XX:d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M' AS "XY", 'F' "XX")); + + languages | XY | XX +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +3 |51741.90909090909|53660.0 +4 |47058.90909090909|49291.5 +5 |39052.875 |46705.555555555555 +; + +averageWithThreeValuesIncludingNull +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')); + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +3 |51741.90909090909|53660.0 +4 |47058.90909090909|49291.5 +5 |39052.875 |46705.555555555555 +; + +averageWithTwoValuesAndLimit +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) LIMIT 3; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +; + + +averageWithTwoValuesAndTinyLimit +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) LIMIT 1; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +; + + +averageWithTwoValuesAndSmallLimit +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) LIMIT 2; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +; + +averageWithOneValueAndOrder +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('F')) ORDER BY languages DESC LIMIT 4; + languages | 'F' +---------------+------------------ +5 |46705.555555555555 +4 |49291.5 +3 |53660.0 +2 |50684.4 +; + +averageWithTwoValuesAndOrderDesc +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, 
gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) ORDER BY languages DESC; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +5 |39052.875 |46705.555555555555 +4 |47058.90909090909|49291.5 +3 |51741.90909090909|53660.0 +2 |44103.90909090909|50684.4 +1 |49767.22222222222|47073.25 +null |48396.28571428572|62140.666666666664 +; + +averageWithTwoValuesAndOrderDescAndLimit +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) ORDER BY languages DESC LIMIT 2; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +5 |39052.875 |46705.555555555555 +4 |47058.90909090909|49291.5 +; + +averageWithTwoValuesAndOrderAsc +schema::languages:bt|'M':d|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (AVG(salary) FOR gender IN ('M', 'F')) ORDER BY languages ASC; + + languages | 'M' | 'F' +---------------+-----------------+------------------ +null |48396.28571428572|62140.666666666664 +1 |49767.22222222222|47073.25 +2 |44103.90909090909|50684.4 +3 |51741.90909090909|53660.0 +4 |47058.90909090909|49291.5 +5 |39052.875 |46705.555555555555 +; + +sumWithoutSubquery +schema::birth_date:ts|emp_no:i|first_name:s|gender:s|hire_date:ts|last_name:s|1:i|2:i +SELECT * FROM test_emp PIVOT (SUM(salary) FOR languages IN (1, 2)) LIMIT 5; + + birth_date | emp_no | first_name | gender | hire_date | last_name | 1 | 2 +---------------------+---------------+---------------+---------------+---------------------+---------------+---------------+--------------- +null |10041 |Uri |F |1989-11-12 00:00:00.0|Lenart |56415 |null +null |10043 |Yishay |M |1990-10-20 00:00:00.0|Tzvieli |34341 |null +null |10044 |Mingsen |F |1994-05-21 00:00:00.0|Casley |39728 |null +1952-04-19 00:00:00.0|10009 |Sumant |F |1985-02-18 00:00:00.0|Peac |66174 |null +1953-01-07 00:00:00.0|10067 |Claudi |M |1987-03-04 00:00:00.0|Stavenow |null |52044 +; + +averageWithOneValueAndMath +schema::languages:bt|'F':d +SELECT * FROM (SELECT languages, gender, salary FROM test_emp) PIVOT (ROUND(AVG(salary) / 2) FOR gender IN ('F')); + + languages | 'F' +---------------+--------------- +null |31070.0 +1 |23537.0 +2 |25342.0 +3 |26830.0 +4 |24646.0 +5 |23353.0 +; \ No newline at end of file diff --git a/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec index 6d165c33433d..b3c32d7eabd2 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/single-node-only/command-sys-geo.csv-spec @@ -7,9 +7,10 @@ SYS COLUMNS TABLE LIKE 'geo'; TABLE_CAT:s | TABLE_SCHEM:s| TABLE_NAME:s | COLUMN_NAME:s | DATA_TYPE:i | TYPE_NAME:s | COLUMN_SIZE:i|BUFFER_LENGTH:i|DECIMAL_DIGITS:i|NUM_PREC_RADIX:i| NULLABLE:i| REMARKS:s | COLUMN_DEF:s |SQL_DATA_TYPE:i|SQL_DATETIME_SUB:i|CHAR_OCTET_LENGTH:i|ORDINAL_POSITION:i|IS_NULLABLE:s|SCOPE_CATALOG:s|SCOPE_SCHEMA:s|SCOPE_TABLE:s|SOURCE_DATA_TYPE:sh|IS_AUTOINCREMENT:s|IS_GENERATEDCOLUMN:s integTest|null |geo |city |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |1 |YES |null |null |null |null |NO |NO -integTest|null |geo |location |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |2 |YES |null |null |null |null |NO |NO -integTest|null |geo |location_no_dv |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |3 
|YES |null |null |null |null |NO |NO -integTest|null |geo |region |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |4 |YES |null |null |null |null |NO |NO -integTest|null |geo |region_point |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |5 |YES |null |null |null |null |NO |NO -integTest|null |geo |shape |114 |GEO_SHAPE |2147483647 |2147483647 |null |null |1 |null |null |114 |0 |null |6 |YES |null |null |null |null |NO |NO +integTest|null |geo |geoshape |114 |GEO_SHAPE |2147483647 |2147483647 |null |null |1 |null |null |114 |0 |null |2 |YES |null |null |null |null |NO |NO +integTest|null |geo |location |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |3 |YES |null |null |null |null |NO |NO +integTest|null |geo |location_no_dv |114 |GEO_POINT |58 |16 |null |null |1 |null |null |114 |0 |null |4 |YES |null |null |null |null |NO |NO +integTest|null |geo |region |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |5 |YES |null |null |null |null |NO |NO +integTest|null |geo |region_point |12 |KEYWORD |32766 |2147483647 |null |null |1 |null |null |12 |0 |2147483647 |6 |YES |null |null |null |null |NO |NO +integTest|null |geo |shape |114 |SHAPE |2147483647 |2147483647 |null |null |1 |null |null |114 |0 |null |7 |YES |null |null |null |null |NO |NO ; \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index 76af159be902..86c119524989 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -90,7 +90,7 @@ orderBy ; querySpecification - : SELECT setQuantifier? selectItem (',' selectItem)* + : SELECT setQuantifier? selectItems fromClause? (WHERE where=booleanExpression)? (GROUP BY groupBy)? @@ -98,7 +98,7 @@ querySpecification ; fromClause - : FROM relation (',' relation)* + : FROM relation (',' relation)* pivotClause? ; groupBy @@ -123,6 +123,10 @@ setQuantifier | ALL ; +selectItems + : selectItem (',' selectItem)* + ; + selectItem : expression (AS? identifier)? #selectExpression ; @@ -154,6 +158,18 @@ relationPrimary | '(' relation ')' (AS? qualifiedName)? #aliasedRelation ; +pivotClause + : PIVOT '(' aggs=pivotArgs FOR column=qualifiedName IN '(' vals=pivotArgs ')' ')' + ; + +pivotArgs + : namedValueExpression (',' namedValueExpression)* + ; + +namedValueExpression + : valueExpression (AS? identifier)? 
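+    // e.g. both the aggregate and the IN values accept an optional alias, as exercised
+    // by the pivot specs: PIVOT (AVG(salary) AS "AVG" FOR gender IN ('M' AS "XY", 'F' "XX"))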
+ ; + expression : booleanExpression ; @@ -343,6 +359,7 @@ whenClause ; // http://developer.mimer.se/validator/sql-reserved-words.tml +// https://developer.mimer.com/wp-content/uploads/standard-sql-reserved-words-summary.pdf nonReserved : ANALYZE | ANALYZED | CATALOGS | COLUMNS | CURRENT_DATE | CURRENT_TIME | CURRENT_TIMESTAMP @@ -355,7 +372,7 @@ nonReserved | LAST | LIMIT | MAPPED | MINUTE | MONTH | OPTIMIZED - | PARSED | PHYSICAL | PLAN + | PARSED | PHYSICAL | PIVOT | PLAN | QUERY | RLIKE | SCHEMAS | SECOND | SHOW | SYS @@ -397,6 +414,7 @@ EXPLAIN: 'EXPLAIN'; EXTRACT: 'EXTRACT'; FALSE: 'FALSE'; FIRST: 'FIRST'; +FOR: 'FOR'; FORMAT: 'FORMAT'; FROM: 'FROM'; FROZEN: 'FROZEN'; @@ -434,6 +452,7 @@ ORDER: 'ORDER'; OUTER: 'OUTER'; PARSED: 'PARSED'; PHYSICAL: 'PHYSICAL'; +PIVOT: 'PIVOT'; PLAN: 'PLAN'; RIGHT: 'RIGHT'; RLIKE: 'RLIKE'; diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens index 7eeec75f9c92..9771af465bb4 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens @@ -35,105 +35,107 @@ EXPLAIN=34 EXTRACT=35 FALSE=36 FIRST=37 -FORMAT=38 -FROM=39 -FROZEN=40 -FULL=41 -FUNCTIONS=42 -GRAPHVIZ=43 -GROUP=44 -HAVING=45 -HOUR=46 -HOURS=47 -IN=48 -INCLUDE=49 -INNER=50 -INTERVAL=51 -IS=52 -JOIN=53 -LAST=54 -LEFT=55 -LIKE=56 -LIMIT=57 -MAPPED=58 -MATCH=59 -MINUTE=60 -MINUTES=61 -MONTH=62 -MONTHS=63 -NATURAL=64 -NOT=65 -NULL=66 -NULLS=67 -ON=68 -OPTIMIZED=69 -OR=70 -ORDER=71 -OUTER=72 -PARSED=73 -PHYSICAL=74 -PLAN=75 -RIGHT=76 -RLIKE=77 -QUERY=78 -SCHEMAS=79 -SECOND=80 -SECONDS=81 -SELECT=82 -SHOW=83 -SYS=84 -TABLE=85 -TABLES=86 -TEXT=87 -THEN=88 -TRUE=89 -TO=90 -TYPE=91 -TYPES=92 -USING=93 -VERIFY=94 -WHEN=95 -WHERE=96 -WITH=97 -YEAR=98 -YEARS=99 -ESCAPE_ESC=100 -FUNCTION_ESC=101 -LIMIT_ESC=102 -DATE_ESC=103 -TIME_ESC=104 -TIMESTAMP_ESC=105 -GUID_ESC=106 -ESC_END=107 -EQ=108 -NULLEQ=109 -NEQ=110 -LT=111 -LTE=112 -GT=113 -GTE=114 -PLUS=115 -MINUS=116 -ASTERISK=117 -SLASH=118 -PERCENT=119 -CAST_OP=120 -CONCAT=121 -DOT=122 -PARAM=123 -STRING=124 -INTEGER_VALUE=125 -DECIMAL_VALUE=126 -IDENTIFIER=127 -DIGIT_IDENTIFIER=128 -TABLE_IDENTIFIER=129 -QUOTED_IDENTIFIER=130 -BACKQUOTED_IDENTIFIER=131 -SIMPLE_COMMENT=132 -BRACKETED_COMMENT=133 -WS=134 -UNRECOGNIZED=135 -DELIMITER=136 +FOR=38 +FORMAT=39 +FROM=40 +FROZEN=41 +FULL=42 +FUNCTIONS=43 +GRAPHVIZ=44 +GROUP=45 +HAVING=46 +HOUR=47 +HOURS=48 +IN=49 +INCLUDE=50 +INNER=51 +INTERVAL=52 +IS=53 +JOIN=54 +LAST=55 +LEFT=56 +LIKE=57 +LIMIT=58 +MAPPED=59 +MATCH=60 +MINUTE=61 +MINUTES=62 +MONTH=63 +MONTHS=64 +NATURAL=65 +NOT=66 +NULL=67 +NULLS=68 +ON=69 +OPTIMIZED=70 +OR=71 +ORDER=72 +OUTER=73 +PARSED=74 +PHYSICAL=75 +PIVOT=76 +PLAN=77 +RIGHT=78 +RLIKE=79 +QUERY=80 +SCHEMAS=81 +SECOND=82 +SECONDS=83 +SELECT=84 +SHOW=85 +SYS=86 +TABLE=87 +TABLES=88 +TEXT=89 +THEN=90 +TRUE=91 +TO=92 +TYPE=93 +TYPES=94 +USING=95 +VERIFY=96 +WHEN=97 +WHERE=98 +WITH=99 +YEAR=100 +YEARS=101 +ESCAPE_ESC=102 +FUNCTION_ESC=103 +LIMIT_ESC=104 +DATE_ESC=105 +TIME_ESC=106 +TIMESTAMP_ESC=107 +GUID_ESC=108 +ESC_END=109 +EQ=110 +NULLEQ=111 +NEQ=112 +LT=113 +LTE=114 +GT=115 +GTE=116 +PLUS=117 +MINUS=118 +ASTERISK=119 +SLASH=120 +PERCENT=121 +CAST_OP=122 +CONCAT=123 +DOT=124 +PARAM=125 +STRING=126 +INTEGER_VALUE=127 +DECIMAL_VALUE=128 +IDENTIFIER=129 +DIGIT_IDENTIFIER=130 +TABLE_IDENTIFIER=131 +QUOTED_IDENTIFIER=132 +BACKQUOTED_IDENTIFIER=133 +SIMPLE_COMMENT=134 +BRACKETED_COMMENT=135 +WS=136 +UNRECOGNIZED=137 +DELIMITER=138 '('=1 ')'=2 ','=3 @@ -171,88 +173,90 @@ DELIMITER=136 
'EXTRACT'=35 'FALSE'=36 'FIRST'=37 -'FORMAT'=38 -'FROM'=39 -'FROZEN'=40 -'FULL'=41 -'FUNCTIONS'=42 -'GRAPHVIZ'=43 -'GROUP'=44 -'HAVING'=45 -'HOUR'=46 -'HOURS'=47 -'IN'=48 -'INCLUDE'=49 -'INNER'=50 -'INTERVAL'=51 -'IS'=52 -'JOIN'=53 -'LAST'=54 -'LEFT'=55 -'LIKE'=56 -'LIMIT'=57 -'MAPPED'=58 -'MATCH'=59 -'MINUTE'=60 -'MINUTES'=61 -'MONTH'=62 -'MONTHS'=63 -'NATURAL'=64 -'NOT'=65 -'NULL'=66 -'NULLS'=67 -'ON'=68 -'OPTIMIZED'=69 -'OR'=70 -'ORDER'=71 -'OUTER'=72 -'PARSED'=73 -'PHYSICAL'=74 -'PLAN'=75 -'RIGHT'=76 -'RLIKE'=77 -'QUERY'=78 -'SCHEMAS'=79 -'SECOND'=80 -'SECONDS'=81 -'SELECT'=82 -'SHOW'=83 -'SYS'=84 -'TABLE'=85 -'TABLES'=86 -'TEXT'=87 -'THEN'=88 -'TRUE'=89 -'TO'=90 -'TYPE'=91 -'TYPES'=92 -'USING'=93 -'VERIFY'=94 -'WHEN'=95 -'WHERE'=96 -'WITH'=97 -'YEAR'=98 -'YEARS'=99 -'{ESCAPE'=100 -'{FN'=101 -'{LIMIT'=102 -'{D'=103 -'{T'=104 -'{TS'=105 -'{GUID'=106 -'}'=107 -'='=108 -'<=>'=109 -'<'=111 -'<='=112 -'>'=113 -'>='=114 -'+'=115 -'-'=116 -'*'=117 -'/'=118 -'%'=119 -'::'=120 -'||'=121 -'.'=122 -'?'=123 +'FOR'=38 +'FORMAT'=39 +'FROM'=40 +'FROZEN'=41 +'FULL'=42 +'FUNCTIONS'=43 +'GRAPHVIZ'=44 +'GROUP'=45 +'HAVING'=46 +'HOUR'=47 +'HOURS'=48 +'IN'=49 +'INCLUDE'=50 +'INNER'=51 +'INTERVAL'=52 +'IS'=53 +'JOIN'=54 +'LAST'=55 +'LEFT'=56 +'LIKE'=57 +'LIMIT'=58 +'MAPPED'=59 +'MATCH'=60 +'MINUTE'=61 +'MINUTES'=62 +'MONTH'=63 +'MONTHS'=64 +'NATURAL'=65 +'NOT'=66 +'NULL'=67 +'NULLS'=68 +'ON'=69 +'OPTIMIZED'=70 +'OR'=71 +'ORDER'=72 +'OUTER'=73 +'PARSED'=74 +'PHYSICAL'=75 +'PIVOT'=76 +'PLAN'=77 +'RIGHT'=78 +'RLIKE'=79 +'QUERY'=80 +'SCHEMAS'=81 +'SECOND'=82 +'SECONDS'=83 +'SELECT'=84 +'SHOW'=85 +'SYS'=86 +'TABLE'=87 +'TABLES'=88 +'TEXT'=89 +'THEN'=90 +'TRUE'=91 +'TO'=92 +'TYPE'=93 +'TYPES'=94 +'USING'=95 +'VERIFY'=96 +'WHEN'=97 +'WHERE'=98 +'WITH'=99 +'YEAR'=100 +'YEARS'=101 +'{ESCAPE'=102 +'{FN'=103 +'{LIMIT'=104 +'{D'=105 +'{T'=106 +'{TS'=107 +'{GUID'=108 +'}'=109 +'='=110 +'<=>'=111 +'<'=113 +'<='=114 +'>'=115 +'>='=116 +'+'=117 +'-'=118 +'*'=119 +'/'=120 +'%'=121 +'::'=122 +'||'=123 +'.'=124 +'?'=125 diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens index 603e67fec88c..adb6142e8653 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens @@ -35,104 +35,106 @@ EXPLAIN=34 EXTRACT=35 FALSE=36 FIRST=37 -FORMAT=38 -FROM=39 -FROZEN=40 -FULL=41 -FUNCTIONS=42 -GRAPHVIZ=43 -GROUP=44 -HAVING=45 -HOUR=46 -HOURS=47 -IN=48 -INCLUDE=49 -INNER=50 -INTERVAL=51 -IS=52 -JOIN=53 -LAST=54 -LEFT=55 -LIKE=56 -LIMIT=57 -MAPPED=58 -MATCH=59 -MINUTE=60 -MINUTES=61 -MONTH=62 -MONTHS=63 -NATURAL=64 -NOT=65 -NULL=66 -NULLS=67 -ON=68 -OPTIMIZED=69 -OR=70 -ORDER=71 -OUTER=72 -PARSED=73 -PHYSICAL=74 -PLAN=75 -RIGHT=76 -RLIKE=77 -QUERY=78 -SCHEMAS=79 -SECOND=80 -SECONDS=81 -SELECT=82 -SHOW=83 -SYS=84 -TABLE=85 -TABLES=86 -TEXT=87 -THEN=88 -TRUE=89 -TO=90 -TYPE=91 -TYPES=92 -USING=93 -VERIFY=94 -WHEN=95 -WHERE=96 -WITH=97 -YEAR=98 -YEARS=99 -ESCAPE_ESC=100 -FUNCTION_ESC=101 -LIMIT_ESC=102 -DATE_ESC=103 -TIME_ESC=104 -TIMESTAMP_ESC=105 -GUID_ESC=106 -ESC_END=107 -EQ=108 -NULLEQ=109 -NEQ=110 -LT=111 -LTE=112 -GT=113 -GTE=114 -PLUS=115 -MINUS=116 -ASTERISK=117 -SLASH=118 -PERCENT=119 -CAST_OP=120 -CONCAT=121 -DOT=122 -PARAM=123 -STRING=124 -INTEGER_VALUE=125 -DECIMAL_VALUE=126 -IDENTIFIER=127 -DIGIT_IDENTIFIER=128 -TABLE_IDENTIFIER=129 -QUOTED_IDENTIFIER=130 -BACKQUOTED_IDENTIFIER=131 -SIMPLE_COMMENT=132 -BRACKETED_COMMENT=133 -WS=134 -UNRECOGNIZED=135 +FOR=38 +FORMAT=39 +FROM=40 +FROZEN=41 +FULL=42 
+FUNCTIONS=43 +GRAPHVIZ=44 +GROUP=45 +HAVING=46 +HOUR=47 +HOURS=48 +IN=49 +INCLUDE=50 +INNER=51 +INTERVAL=52 +IS=53 +JOIN=54 +LAST=55 +LEFT=56 +LIKE=57 +LIMIT=58 +MAPPED=59 +MATCH=60 +MINUTE=61 +MINUTES=62 +MONTH=63 +MONTHS=64 +NATURAL=65 +NOT=66 +NULL=67 +NULLS=68 +ON=69 +OPTIMIZED=70 +OR=71 +ORDER=72 +OUTER=73 +PARSED=74 +PHYSICAL=75 +PIVOT=76 +PLAN=77 +RIGHT=78 +RLIKE=79 +QUERY=80 +SCHEMAS=81 +SECOND=82 +SECONDS=83 +SELECT=84 +SHOW=85 +SYS=86 +TABLE=87 +TABLES=88 +TEXT=89 +THEN=90 +TRUE=91 +TO=92 +TYPE=93 +TYPES=94 +USING=95 +VERIFY=96 +WHEN=97 +WHERE=98 +WITH=99 +YEAR=100 +YEARS=101 +ESCAPE_ESC=102 +FUNCTION_ESC=103 +LIMIT_ESC=104 +DATE_ESC=105 +TIME_ESC=106 +TIMESTAMP_ESC=107 +GUID_ESC=108 +ESC_END=109 +EQ=110 +NULLEQ=111 +NEQ=112 +LT=113 +LTE=114 +GT=115 +GTE=116 +PLUS=117 +MINUS=118 +ASTERISK=119 +SLASH=120 +PERCENT=121 +CAST_OP=122 +CONCAT=123 +DOT=124 +PARAM=125 +STRING=126 +INTEGER_VALUE=127 +DECIMAL_VALUE=128 +IDENTIFIER=129 +DIGIT_IDENTIFIER=130 +TABLE_IDENTIFIER=131 +QUOTED_IDENTIFIER=132 +BACKQUOTED_IDENTIFIER=133 +SIMPLE_COMMENT=134 +BRACKETED_COMMENT=135 +WS=136 +UNRECOGNIZED=137 '('=1 ')'=2 ','=3 @@ -170,88 +172,90 @@ UNRECOGNIZED=135 'EXTRACT'=35 'FALSE'=36 'FIRST'=37 -'FORMAT'=38 -'FROM'=39 -'FROZEN'=40 -'FULL'=41 -'FUNCTIONS'=42 -'GRAPHVIZ'=43 -'GROUP'=44 -'HAVING'=45 -'HOUR'=46 -'HOURS'=47 -'IN'=48 -'INCLUDE'=49 -'INNER'=50 -'INTERVAL'=51 -'IS'=52 -'JOIN'=53 -'LAST'=54 -'LEFT'=55 -'LIKE'=56 -'LIMIT'=57 -'MAPPED'=58 -'MATCH'=59 -'MINUTE'=60 -'MINUTES'=61 -'MONTH'=62 -'MONTHS'=63 -'NATURAL'=64 -'NOT'=65 -'NULL'=66 -'NULLS'=67 -'ON'=68 -'OPTIMIZED'=69 -'OR'=70 -'ORDER'=71 -'OUTER'=72 -'PARSED'=73 -'PHYSICAL'=74 -'PLAN'=75 -'RIGHT'=76 -'RLIKE'=77 -'QUERY'=78 -'SCHEMAS'=79 -'SECOND'=80 -'SECONDS'=81 -'SELECT'=82 -'SHOW'=83 -'SYS'=84 -'TABLE'=85 -'TABLES'=86 -'TEXT'=87 -'THEN'=88 -'TRUE'=89 -'TO'=90 -'TYPE'=91 -'TYPES'=92 -'USING'=93 -'VERIFY'=94 -'WHEN'=95 -'WHERE'=96 -'WITH'=97 -'YEAR'=98 -'YEARS'=99 -'{ESCAPE'=100 -'{FN'=101 -'{LIMIT'=102 -'{D'=103 -'{T'=104 -'{TS'=105 -'{GUID'=106 -'}'=107 -'='=108 -'<=>'=109 -'<'=111 -'<='=112 -'>'=113 -'>='=114 -'+'=115 -'-'=116 -'*'=117 -'/'=118 -'%'=119 -'::'=120 -'||'=121 -'.'=122 -'?'=123 +'FOR'=38 +'FORMAT'=39 +'FROM'=40 +'FROZEN'=41 +'FULL'=42 +'FUNCTIONS'=43 +'GRAPHVIZ'=44 +'GROUP'=45 +'HAVING'=46 +'HOUR'=47 +'HOURS'=48 +'IN'=49 +'INCLUDE'=50 +'INNER'=51 +'INTERVAL'=52 +'IS'=53 +'JOIN'=54 +'LAST'=55 +'LEFT'=56 +'LIKE'=57 +'LIMIT'=58 +'MAPPED'=59 +'MATCH'=60 +'MINUTE'=61 +'MINUTES'=62 +'MONTH'=63 +'MONTHS'=64 +'NATURAL'=65 +'NOT'=66 +'NULL'=67 +'NULLS'=68 +'ON'=69 +'OPTIMIZED'=70 +'OR'=71 +'ORDER'=72 +'OUTER'=73 +'PARSED'=74 +'PHYSICAL'=75 +'PIVOT'=76 +'PLAN'=77 +'RIGHT'=78 +'RLIKE'=79 +'QUERY'=80 +'SCHEMAS'=81 +'SECOND'=82 +'SECONDS'=83 +'SELECT'=84 +'SHOW'=85 +'SYS'=86 +'TABLE'=87 +'TABLES'=88 +'TEXT'=89 +'THEN'=90 +'TRUE'=91 +'TO'=92 +'TYPE'=93 +'TYPES'=94 +'USING'=95 +'VERIFY'=96 +'WHEN'=97 +'WHERE'=98 +'WITH'=99 +'YEAR'=100 +'YEARS'=101 +'{ESCAPE'=102 +'{FN'=103 +'{LIMIT'=104 +'{D'=105 +'{T'=106 +'{TS'=107 +'{GUID'=108 +'}'=109 +'='=110 +'<=>'=111 +'<'=113 +'<='=114 +'>'=115 +'>='=116 +'+'=117 +'-'=118 +'*'=119 +'/'=120 +'%'=121 +'::'=122 +'||'=123 +'.'=124 +'?'=125 diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 901318258c0b..5fdd1f9124d7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -40,6 +40,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Join; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.UnaryPlan; @@ -419,7 +420,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> { return result; } - private List<Attribute> expandStar(UnresolvedStar us, List<Attribute> output) { + static List<Attribute> expandStar(UnresolvedStar us, List<Attribute> output) { List<Attribute> expanded = new ArrayList<>(); // a qualifier is specified - since this is a star, it should be a CompoundDataType @@ -460,24 +461,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> { } } } else { - // add only primitives - // but filter out multi fields (allow only the top-level value) - Set<Attribute> seenMultiFields = new LinkedHashSet<>(); - - for (Attribute a : output) { - if (!DataTypes.isUnsupported(a.dataType()) && a.dataType().isPrimitive()) { - if (a instanceof FieldAttribute) { - FieldAttribute fa = (FieldAttribute) a; - // skip nested fields and seen multi-fields - if (!fa.isNested() && !seenMultiFields.contains(fa.parent())) { - expanded.add(a); - seenMultiFields.add(a); - } - } else { - expanded.add(a); - } - } - } + expanded.addAll(Expressions.onlyPrimitiveFieldAttributes(output)); } return expanded; @@ -954,12 +938,24 @@ public class Analyzer extends RuleExecutor<LogicalPlan> { } return a; } + if (plan instanceof Pivot) { + Pivot p = (Pivot) plan; + if (p.childrenResolved()) { + if (hasUnresolvedAliases(p.values())) { + p = new Pivot(p.source(), p.child(), p.column(), assignAliases(p.values()), p.aggregates()); + } + if (hasUnresolvedAliases(p.aggregates())) { + p = new Pivot(p.source(), p.child(), p.column(), p.values(), assignAliases(p.aggregates())); + } + } + return p; + } return plan; } private boolean hasUnresolvedAliases(List<? extends NamedExpression> expressions) { - return expressions != null && expressions.stream().anyMatch(e -> e instanceof UnresolvedAlias); + return expressions != null && Expressions.anyMatch(expressions, e -> e instanceof UnresolvedAlias); } private List<NamedExpression> assignAliases(List<NamedExpression> exprs) { @@ -1277,13 +1273,20 @@ protected LogicalPlan rule(LogicalPlan plan) { if (plan instanceof Project) { Project p = (Project) plan; - return new Project(p.source(), p.child(), cleanSecondaryAliases(p.projections())); + return new Project(p.source(), p.child(), cleanChildrenAliases(p.projections())); } if (plan instanceof Aggregate) { Aggregate a = (Aggregate) plan; - // clean group expressions - return new Aggregate(a.source(), a.child(), cleanAllAliases(a.groupings()), cleanSecondaryAliases(a.aggregates())); + // aliases inside GROUP BY are irrelevant so remove all of them + // however aggregations are important (ultimately a projection) + return new Aggregate(a.source(), a.child(), cleanAllAliases(a.groupings()), cleanChildrenAliases(a.aggregates())); + } + + if (plan instanceof Pivot) { + Pivot p = (Pivot) plan; + return new Pivot(p.source(), p.child(), trimAliases(p.column()), cleanChildrenAliases(p.values()), + cleanChildrenAliases(p.aggregates())); } return plan.transformExpressionsOnly(e -> { @@ -1294,7 +1297,7 @@ }); } - private List 
cleanSecondaryAliases(List args) { + private List cleanChildrenAliases(List args) { List cleaned = new ArrayList<>(args.size()); for (NamedExpression ne : args) { cleaned.add((NamedExpression) trimNonTopLevelAliases(ne)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index d5a4cb436e6a..31636a30c68c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -13,6 +13,8 @@ import org.elasticsearch.xpack.sql.expression.Exists; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; @@ -33,13 +35,16 @@ import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.command.Command; import org.elasticsearch.xpack.sql.stats.FeatureMetric; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.tree.Node; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; import org.elasticsearch.xpack.sql.type.EsField; +import org.elasticsearch.xpack.sql.util.Holder; import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.ArrayList; @@ -64,6 +69,8 @@ import static org.elasticsearch.xpack.sql.stats.FeatureMetric.LOCAL; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.ORDERBY; import static org.elasticsearch.xpack.sql.stats.FeatureMetric.WHERE; import static org.elasticsearch.xpack.sql.type.DataType.GEO_SHAPE; +import static org.elasticsearch.xpack.sql.type.DataType.SHAPE; +import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine; /** * The verifier has the role of checking the analyzed tree for failures and build a list of failures following this check. 
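The PIVOT validation added to this class boils down to three value-side rules: a pivoted value must be a foldable literal, must not be null, and must be type-compatible with the pivoted column; each pivot aggregate must also contain an actual aggregate function. A rough standalone sketch of the value rules (hypothetical stand-in types; the real checks run over the engine's Expression and DataType trees, not Class tokens):

    import java.util.ArrayList;
    import java.util.List;

    final class PivotValueChecks {
        // Collect one failure message per offending PIVOT value.
        static List<String> check(Class<?> columnType, List<Object> literalValues) {
            List<String> failures = new ArrayList<>();
            for (Object value : literalValues) {
                if (value == null) {
                    failures.add("Null not allowed as a PIVOT value");
                } else if (!columnType.isInstance(value)) {
                    failures.add("Literal [" + value + "] does not match type ["
                            + columnType.getSimpleName() + "] of the PIVOT column");
                }
            }
            return failures;
        }
    }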
@@ -71,7 +78,7 @@ import static org.elasticsearch.xpack.sql.type.DataType.GEO_SHAPE; */ public final class Verifier { private final Metrics metrics; - + public Verifier(Metrics metrics) { this.metrics = metrics; } @@ -237,6 +244,7 @@ public final class Verifier { checkForScoreInsideFunctions(p, localFailures); checkNestedUsedInGroupByOrHaving(p, localFailures); checkForGeoFunctionsOnDocValues(p, localFailures); + checkPivot(p, localFailures); // everything checks out // mark the plan as analyzed @@ -247,7 +255,7 @@ public final class Verifier { failures.addAll(localFailures); }); } - + // gather metrics if (failures.isEmpty()) { BitSet b = new BitSet(FeatureMetric.values().length); @@ -464,20 +472,39 @@ public final class Verifier { private static boolean checkGroupByInexactField(LogicalPlan p, Set localFailures) { if (p instanceof Aggregate) { - Aggregate a = (Aggregate) p; - - // The grouping can not be an aggregate function or an inexact field (e.g. text without a keyword) - a.groupings().forEach(e -> e.forEachUp(c -> { - EsField.Exact exact = c.getExactInfo(); - if (exact.hasExact() == false) { - localFailures.add(fail(c, "Field [" + c.sourceText() + "] of data type [" + c.dataType().typeName + "] " + - "cannot be used for grouping; " + exact.errorMsg())); - } - }, FieldAttribute.class)); + return onlyExactFields(((Aggregate) p).groupings(), localFailures); } return true; } + // The grouping can not be an aggregate function or an inexact field (e.g. text without a keyword) + private static boolean onlyExactFields(List expressions, Set localFailures) { + Holder onlyExact = new Holder<>(Boolean.TRUE); + + expressions.forEach(e -> e.forEachUp(c -> { + EsField.Exact exact = c.getExactInfo(); + if (exact.hasExact() == false) { + localFailures.add(fail(c, "Field [{}] of data type [{}] cannot be used for grouping; {}", c.sourceText(), + c.dataType().typeName, exact.errorMsg())); + onlyExact.set(Boolean.FALSE); + } + }, FieldAttribute.class)); + + return onlyExact.get(); + } + + private static boolean onlyRawFields(Iterable expressions, Set localFailures) { + Holder onlyExact = new Holder<>(Boolean.TRUE); + + expressions.forEach(e -> e.forEachDown(c -> { + if (c instanceof Function || c instanceof FunctionAttribute) { + localFailures.add(fail(c, "No functions allowed (yet); encountered [{}]", c.sourceText())); + onlyExact.set(Boolean.FALSE); + } + })); + return onlyExact.get(); + } + private static boolean checkGroupByTime(LogicalPlan p, Set localFailures) { if (p instanceof Aggregate) { Aggregate a = (Aggregate) p; @@ -605,7 +632,7 @@ public final class Verifier { if (Functions.isAggregate(e)) { return true; } - + // left without leaves which have to match; if not there's a failure // make sure to match directly on the expression and not on the tree // (since otherwise exp might match the function argument which would be incorrect) @@ -618,15 +645,16 @@ public final class Verifier { } return false; } - + private static void checkGroupingFunctionInGroupBy(LogicalPlan p, Set localFailures) { // check if the query has a grouping function (Histogram) but no GROUP BY if (p instanceof Project) { Project proj = (Project) p; proj.projections().forEach(e -> e.forEachDown(f -> localFailures.add(fail(f, "[{}] needs to be part of the grouping", Expressions.name(f))), GroupingFunction.class)); - } else if (p instanceof Aggregate) { - // if it does have a GROUP BY, check if the groupings contain the grouping functions (Histograms) + } + // if it does have a GROUP BY, check if the groupings contain the 
grouping functions (Histograms) + else if (p instanceof Aggregate) { Aggregate a = (Aggregate) p; a.aggregates().forEach(agg -> agg.forEachDown(e -> { if (a.groupings().size() == 0 @@ -707,14 +735,14 @@ public final class Verifier { fail(nested.get(0), "Grouping isn't (yet) compatible with nested fields " + new AttributeSet(nested).names())); nested.clear(); } - + // check in having p.forEachDown(f -> { if (f.child() instanceof Aggregate) { f.condition().forEachUp(match, FieldAttribute.class); } }, Filter.class); - + if (!nested.isEmpty()) { localFailures.add( fail(nested.get(0), "HAVING isn't (yet) compatible with nested fields " + new AttributeSet(nested).names())); @@ -731,6 +759,9 @@ public final class Verifier { if (fa.field().getDataType() == GEO_SHAPE) { localFailures.add(fail(fa, "geo shapes cannot be used for filtering")); } + if (fa.field().getDataType() == SHAPE) { + localFailures.add(fail(fa, "shapes cannot be used for filtering")); + } }, FieldAttribute.class); }, Filter.class); @@ -739,6 +770,9 @@ public final class Verifier { if (fa.field().getDataType() == GEO_SHAPE) { localFailures.add(fail(fa, "geo shapes cannot be used in grouping")); } + if (fa.field().getDataType() == SHAPE) { + localFailures.add(fail(fa, "shapes cannot be used in grouping")); + } }, FieldAttribute.class)), Aggregate.class); @@ -747,6 +781,67 @@ public final class Verifier { if (fa.field().getDataType() == GEO_SHAPE) { localFailures.add(fail(fa, "geo shapes cannot be used for sorting")); } + if (fa.field().getDataType() == SHAPE) { + localFailures.add(fail(fa, "shapes cannot be used for sorting")); + } }, FieldAttribute.class)), OrderBy.class); } + + private static void checkPivot(LogicalPlan p, Set localFailures) { + p.forEachDown(pv -> { + // check only exact fields are used inside PIVOTing + if (onlyExactFields(combine(pv.groupingSet(), pv.column()), localFailures) == false + || onlyRawFields(pv.groupingSet(), localFailures) == false) { + // if that is not the case, no need to do further validation since the declaration is fundamentally wrong + return; + } + + // check values + DataType colType = pv.column().dataType(); + for (NamedExpression v : pv.values()) { + // check all values are foldable + Expression ex = v instanceof Alias ? 
((Alias) v).child() : v; + if (ex instanceof Literal == false) { + localFailures.add(fail(v, "Non-literal [{}] found inside PIVOT values", v.name())); + } + else if (ex.foldable() && ex.fold() == null) { + localFailures.add(fail(v, "Null not allowed as a PIVOT value", v.name())); + } + // and that their type is compatible with that of the column + else if (DataTypes.areTypesCompatible(colType, v.dataType()) == false) { + localFailures.add(fail(v, "Literal [{}] of type [{}] does not match type [{}] of PIVOT column [{}]", v.name(), + v.dataType().typeName, colType.typeName, pv.column().sourceText())); + } + } + + // check aggregate function, in particular formulas that might hide literals or scalars + pv.aggregates().forEach(a -> { + Holder hasAggs = new Holder<>(Boolean.FALSE); + List aggs = a.collectFirstChildren(c -> { + // skip aggregate functions + if (Functions.isAggregate(c)) { + hasAggs.set(Boolean.TRUE); + return true; + } + if (c.children().isEmpty()) { + return true; + } + return false; + }); + + if (Boolean.FALSE.equals(hasAggs.get())) { + localFailures.add(fail(a, "No aggregate function found in PIVOT at [{}]", a.sourceText())); + } + // check mixture of Agg and column (wrapped in scalar) + else { + for (Expression agg : aggs) { + if (agg instanceof FieldAttribute) { + localFailures.add(fail(a, "Non-aggregate function found in PIVOT at [{}]", a.sourceText())); + } + } + } + }); + + }, Pivot.class); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java similarity index 72% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java index 76c05c4f6a1b..4f448f049f9b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java @@ -37,14 +37,14 @@ import java.util.BitSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.function.Consumer; -import java.util.function.Function; +import java.util.function.BiFunction; +import java.util.function.Supplier; /** * Cursor for composite aggregation (GROUP BY). * Stores the query that gets updated/slides across requests. */ -public class CompositeAggregationCursor implements Cursor { +public class CompositeAggCursor implements Cursor { private final Logger log = LogManager.getLogger(getClass()); @@ -57,7 +57,7 @@ public class CompositeAggregationCursor implements Cursor { private final int limit; private final boolean includeFrozen; - CompositeAggregationCursor(byte[] next, List exts, BitSet mask, int remainingLimit, boolean includeFrozen, + CompositeAggCursor(byte[] next, List exts, BitSet mask, int remainingLimit, boolean includeFrozen, String... 
indices) { this.indices = indices; this.nextQuery = next; @@ -67,7 +67,7 @@ public class CompositeAggregationCursor implements Cursor { this.includeFrozen = includeFrozen; } - public CompositeAggregationCursor(StreamInput in) throws IOException { + public CompositeAggCursor(StreamInput in) throws IOException { indices = in.readStringArray(); nextQuery = in.readByteArray(); limit = in.readVInt(); @@ -86,7 +86,6 @@ public class CompositeAggregationCursor implements Cursor { out.writeNamedWriteableList(extractors); out.writeByteArray(mask.toByteArray()); out.writeBoolean(includeFrozen); - } @Override @@ -133,16 +132,17 @@ public class CompositeAggregationCursor implements Cursor { log.trace("About to execute composite query {} on {}", StringUtils.toString(query), indices); } - SearchRequest search = Querier.prepareRequest(client, query, cfg.pageTimeout(), includeFrozen, indices); + SearchRequest request = Querier.prepareRequest(client, query, cfg.pageTimeout(), includeFrozen, indices); - client.search(search, new ActionListener<>() { + client.search(request, new ActionListener<>() { @Override - public void onResponse(SearchResponse r) { - handle(r, search.source(), ba -> new CompositeAggsRowSet(extractors, mask, r, limit, ba), - () -> client.search(search, this), - p -> listener.onResponse(p), - e -> listener.onFailure(e), - Schema.EMPTY, includeFrozen, indices); + public void onResponse(SearchResponse response) { + handle(response, request.source(), + makeRowSet(response), + makeCursor(), + () -> client.search(request, this), + listener, + Schema.EMPTY); } @Override @@ -151,41 +151,56 @@ public class CompositeAggregationCursor implements Cursor { } }); } + + protected Supplier makeRowSet(SearchResponse response) { + return () -> new CompositeAggRowSet(extractors, mask, response, limit); + } - static void handle(SearchResponse response, SearchSourceBuilder source, Function makeRowSet, - Runnable retry, Consumer onPage, Consumer onFailure, - Schema schema, boolean includeFrozen, String[] indices) { + protected BiFunction makeCursor() { + return (q, r) -> new CompositeAggCursor(q, r.extractors(), r.mask(), r.remainingData(), includeFrozen, indices); + } + + static void handle(SearchResponse response, SearchSourceBuilder source, + Supplier makeRowSet, + BiFunction makeCursor, + Runnable retry, + ActionListener listener, + Schema schema) { // there are some results if (response.getAggregations().asList().isEmpty() == false) { // retry - if (CompositeAggregationCursor.shouldRetryDueToEmptyPage(response)) { - CompositeAggregationCursor.updateCompositeAfterKey(response, source); + if (shouldRetryDueToEmptyPage(response)) { + updateCompositeAfterKey(response, source); retry.run(); return; } try { - boolean hasAfterKey = updateCompositeAfterKey(response, source); - byte[] queryAsBytes = hasAfterKey ? serializeQuery(source) : null; - CompositeAggsRowSet rowSet = makeRowSet.apply(queryAsBytes); + CompositeAggRowSet rowSet = makeRowSet.get(); + Map afterKey = rowSet.afterKey(); + + byte[] queryAsBytes = null; + if (afterKey != null) { + updateSourceAfterKey(afterKey, source); + queryAsBytes = serializeQuery(source); + } Cursor next = rowSet.remainingData() == 0 ? 
Cursor.EMPTY - : new CompositeAggregationCursor(queryAsBytes, rowSet.extractors(), rowSet.mask(), - rowSet.remainingData(), includeFrozen, indices); - onPage.accept(new Page(rowSet, next)); + : makeCursor.apply(queryAsBytes, rowSet); + listener.onResponse(new Page(rowSet, next)); } catch (Exception ex) { - onFailure.accept(ex); + listener.onFailure(ex); } } // no results else { - onPage.accept(Page.last(Rows.empty(schema))); + listener.onResponse(Page.last(Rows.empty(schema))); } } - static boolean shouldRetryDueToEmptyPage(SearchResponse response) { + private static boolean shouldRetryDueToEmptyPage(SearchResponse response) { CompositeAggregation composite = getComposite(response); // if there are no buckets but a next page, go fetch it instead of sending an empty response to the client return composite != null && composite.getBuckets().isEmpty() && composite.afterKey() != null && !composite.afterKey().isEmpty(); @@ -204,25 +219,22 @@ public class CompositeAggregationCursor implements Cursor { throw new SqlIllegalArgumentException("Unrecognized root group found; {}", agg.getClass()); } - static boolean updateCompositeAfterKey(SearchResponse r, SearchSourceBuilder next) { + private static void updateCompositeAfterKey(SearchResponse r, SearchSourceBuilder search) { CompositeAggregation composite = getComposite(r); if (composite == null) { throw new SqlIllegalArgumentException("Invalid server response; no group-by detected"); } - Map afterKey = composite.afterKey(); - // a null after-key means done - if (afterKey == null) { - return false; - } + updateSourceAfterKey(composite.afterKey(), search); + } - AggregationBuilder aggBuilder = next.aggregations().getAggregatorFactories().iterator().next(); + private static void updateSourceAfterKey(Map afterKey, SearchSourceBuilder search) { + AggregationBuilder aggBuilder = search.aggregations().getAggregatorFactories().iterator().next(); // update after-key with the new value if (aggBuilder instanceof CompositeAggregationBuilder) { CompositeAggregationBuilder comp = (CompositeAggregationBuilder) aggBuilder; comp.aggregateAfter(afterKey); - return true; } else { throw new SqlIllegalArgumentException("Invalid client request; expected a group-by but instead got {}", aggBuilder); } @@ -240,7 +252,7 @@ public class CompositeAggregationCursor implements Cursor { /** * Serializes the search source to a byte array. 
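+     * The serialized bytes are stored in the cursor so the next page request can resume
+     * the composite aggregation from its after-key.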
*/ - static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { + private static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { if (source == null) { return new byte[0]; } @@ -259,7 +271,7 @@ public class CompositeAggregationCursor implements Cursor { @Override public int hashCode() { - return Objects.hash(Arrays.hashCode(indices), Arrays.hashCode(nextQuery), extractors, limit); + return Objects.hash(Arrays.hashCode(indices), Arrays.hashCode(nextQuery), extractors, limit, mask, includeFrozen); } @Override @@ -267,15 +279,16 @@ public class CompositeAggregationCursor implements Cursor { if (obj == null || obj.getClass() != getClass()) { return false; } - CompositeAggregationCursor other = (CompositeAggregationCursor) obj; + CompositeAggCursor other = (CompositeAggCursor) obj; return Arrays.equals(indices, other.indices) && Arrays.equals(nextQuery, other.nextQuery) && Objects.equals(extractors, other.extractors) - && Objects.equals(limit, other.limit); + && Objects.equals(limit, other.limit) + && Objects.equals(includeFrozen, other.includeFrozen); } @Override public String toString() { return "cursor for composite on index [" + Arrays.toString(indices) + "]"; } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggRowSet.java similarity index 70% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggRowSet.java index dd6b85279cb2..1262e80e0663 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggsRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggRowSet.java @@ -12,50 +12,50 @@ import org.elasticsearch.xpack.sql.session.RowSet; import java.util.BitSet; import java.util.List; +import java.util.Map; import static java.util.Collections.emptyList; /** * {@link RowSet} specific to (GROUP BY) aggregation. */ -class CompositeAggsRowSet extends ResultRowSet { +class CompositeAggRowSet extends ResultRowSet { - private final List buckets; - private final int remainingData; - private final int size; - private int row = 0; + final List buckets; - CompositeAggsRowSet(List exts, BitSet mask, SearchResponse response, int limit, byte[] next) { + Map afterKey; + int remainingData; + int size; + int row = 0; + + CompositeAggRowSet(List exts, BitSet mask, SearchResponse response, int limit) { super(exts, mask); - CompositeAggregation composite = CompositeAggregationCursor.getComposite(response); + CompositeAggregation composite = CompositeAggCursor.getComposite(response); if (composite != null) { buckets = composite.getBuckets(); + afterKey = composite.afterKey(); } else { buckets = emptyList(); + afterKey = null; } // page size size = limit == -1 ? 
buckets.size() : Math.min(buckets.size(), limit); + remainingData = remainingData(afterKey != null, size, limit); + } - if (next == null) { - remainingData = 0; + static int remainingData(boolean hasNextPage, int size, int limit) { + if (hasNextPage == false) { + return 0; } else { - // Compute remaining limit - - // If the limit is -1 then we have a local sorting (sort on aggregate function) that requires all the buckets - // to be processed so we stop only when all data is exhausted. int remainingLimit = (limit == -1) ? limit : ((limit - size) >= 0 ? (limit - size) : 0); // if the computed limit is zero, or the size is zero it means either there's nothing left or the limit has been reached // note that a composite agg might be valid but return zero groups (since these can be filtered with HAVING/bucket selector) // however the Querier takes care of that and keeps making requests until either the query is invalid or at least one response // is returned. - if (size == 0 || remainingLimit == 0) { - remainingData = 0; - } else { - remainingData = remainingLimit; - } + return size == 0 ? size : remainingLimit; } } @@ -91,4 +91,8 @@ class CompositeAggsRowSet extends ResultRowSet { int remainingData() { return remainingData; } + + Map afterKey() { + return afterKey; + } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotCursor.java new file mode 100644 index 000000000000..a815602d950b --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotCursor.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.execution.search; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.execution.search.extractor.BucketExtractor; +import org.elasticsearch.xpack.sql.type.Schema; + +import java.io.IOException; +import java.util.Arrays; +import java.util.BitSet; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; +import java.util.function.Supplier; + +public class PivotCursor extends CompositeAggCursor { + + public static final String NAME = "p"; + + private final Map previousKey; + + PivotCursor(Map previousKey, byte[] next, List exts, BitSet mask, int remainingLimit, + boolean includeFrozen, + String... indices) { + super(next, exts, mask, remainingLimit, includeFrozen, indices); + this.previousKey = previousKey; + } + + public PivotCursor(StreamInput in) throws IOException { + super(in); + previousKey = in.readBoolean() == true ? 
in.readMap() : null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + if (previousKey != null) { + out.writeBoolean(true); + out.writeMap(previousKey); + } else { + out.writeBoolean(false); + } + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Supplier<CompositeAggRowSet> makeRowSet(SearchResponse response) { + return () -> new PivotRowSet(Schema.EMPTY, extractors(), mask(), response, limit(), previousKey); + } + + @Override + protected BiFunction<byte[], CompositeAggRowSet, CompositeAggCursor> makeCursor() { + return (q, r) -> { + Map<String, Object> lastAfterKey = r instanceof PivotRowSet ? ((PivotRowSet) r).lastAfterKey() : null; + return new PivotCursor(lastAfterKey, q, r.extractors(), r.mask(), r.remainingData(), includeFrozen(), indices()); + }; + } + + @Override + public String toString() { + return "pivot for index [" + Arrays.toString(indices()) + "]"; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java new file mode 100644 index 000000000000..3d7e12b3d9b0 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/PivotRowSet.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.execution.search; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.xpack.sql.execution.search.extractor.BucketExtractor; +import org.elasticsearch.xpack.sql.type.Schema; + +import java.util.ArrayList; +import java.util.BitSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; + +import static java.util.Collections.emptyList; + +class PivotRowSet extends SchemaCompositeAggRowSet { + + private final List<Object[]> data; + private final Map<String, Object> lastAfterKey; + + PivotRowSet(Schema schema, List<BucketExtractor> exts, BitSet mask, SearchResponse response, int limit, + Map<String, Object> previousLastKey) { + super(schema, exts, mask, response, limit); + + data = buckets.isEmpty() ? emptyList() : new ArrayList<>(); + + // the last page contains no data, handle that to avoid NPEs and such + if (buckets.isEmpty()) { + lastAfterKey = null; + return; + } + + // consume buckets until all pivot columns are initialized or the next grouping starts + // to determine a group, find all group-by extractors (CompositeKeyExtractor) + // extract their values and keep iterating through the buckets as long as the result is the same + + Map<String, Object> currentRowGroupKey = null; + Map<String, Object> lastCompletedGroupKey = null; + Object[] currentRow = new Object[columnCount()]; + + for (int bucketIndex = 0; bucketIndex < buckets.size(); bucketIndex++) { + CompositeAggregation.Bucket bucket = buckets.get(bucketIndex); + Map<String, Object> key = bucket.getKey(); + + // does the bucket belong to the same group? 
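+            // (a pivot row spans all consecutive buckets sharing the same group-by prefix; only the
+            // last composite key component - the pivoted column - changes within a row, which is the
+            // prefix comparison performed by sameCompositeKey below)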
+            if (currentRowGroupKey == null || sameCompositeKey(currentRowGroupKey, key)) { + currentRowGroupKey = key; + } + // done computing row + else { + // be sure to remember the last consumed group before changing to the new one + lastCompletedGroupKey = currentRowGroupKey; + currentRowGroupKey = key; + // save the data + data.add(currentRow); + + if (limit > 0 && data.size() == limit) { + break; + } + // create a new row + currentRow = new Object[columnCount()]; + } + + for (int columnIndex = 0; columnIndex < currentRow.length; columnIndex++) { + BucketExtractor extractor = userExtractor(columnIndex); + Object value = extractor.extract(bucket); + + // rerun the bucket through all the extractors but update only the non-null components + // since the pivot extractors will react only when encountering the matching group + if (currentRow[columnIndex] == null && value != null) { + currentRow[columnIndex] = value; + } + } + } + + // check the last group using the following: + // a. limit has been reached, the rest of the data is ignored. + if (limit > 0 && data.size() == limit) { + afterKey = null; + } + // b. the last key has been sent before (it's the last page) + else if ((previousLastKey != null && sameCompositeKey(previousLastKey, currentRowGroupKey))) { + data.add(currentRow); + afterKey = null; + } + // c. all the values are initialized (there might be another page but no need to ask for the group again) + // d. or no data was added (typically because there's a null value such as the group) + else if (hasNull(currentRow) == false || data.isEmpty()) { + data.add(currentRow); + afterKey = currentRowGroupKey; + } + // otherwise we can't tell whether it's complete or not + // so discard the last group and ask for it on the next page + else { + afterKey = lastCompletedGroupKey; + } + + // lastly initialize the size and remainingData + size = data.size(); + remainingData = remainingData(afterKey != null, size, limit); + lastAfterKey = currentRowGroupKey; + } + + private boolean hasNull(Object[] currentRow) { + for (Object object : currentRow) { + if (object == null) { + return true; + } + } + return false; + } + + // compare the equality of two composite keys WITHOUT the last group + // this method relies on the internal map implementation which preserves the key position + // hence the comparison happens against the current key (not the previous one which might + // have a different order due to serialization) + static boolean sameCompositeKey(Map<String, Object> previous, Map<String, Object> current) { + int keys = current.size() - 1; + int keyIndex = 0; + for (Entry<String, Object> entry : current.entrySet()) { + if (keyIndex++ >= keys) { + return true; + } + if (Objects.equals(entry.getValue(), previous.get(entry.getKey())) == false) { + return false; + } + } + // there's no other key, it's the same group + return true; + } + + @Override + protected Object getColumn(int column) { + return data.get(row)[column]; + } + + Map<String, Object> lastAfterKey() { + return lastAfterKey; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 10b4d8663ef6..6b8bf7ab6df9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.sql.execution.search.extractor.ConstantExtractor; import 
org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.MetricAggExtractor; +import org.elasticsearch.xpack.sql.execution.search.extractor.PivotExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.TopHitsAggExtractor; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.ExpressionId; @@ -50,6 +51,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef; import org.elasticsearch.xpack.sql.querydsl.container.GlobalCountRef; import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef; import org.elasticsearch.xpack.sql.querydsl.container.MetricAggRef; +import org.elasticsearch.xpack.sql.querydsl.container.PivotColumnRef; import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef; import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef; @@ -71,9 +73,12 @@ import java.util.BitSet; import java.util.Comparator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiFunction; +import java.util.function.Supplier; import static java.util.Collections.singletonList; import static org.elasticsearch.action.ActionListener.wrap; @@ -321,21 +326,39 @@ public class Querier { */ static class CompositeActionListener extends BaseAggActionListener { + private final boolean isPivot; + CompositeActionListener(ActionListener listener, Client client, Configuration cfg, List output, QueryContainer query, SearchRequest request) { super(listener, client, cfg, output, query, request); + + isPivot = query.fields().stream().anyMatch(t -> t.v1() instanceof PivotColumnRef); } @Override protected void handleResponse(SearchResponse response, ActionListener listener) { - CompositeAggregationCursor.handle(response, request.source(), - ba -> new SchemaCompositeAggsRowSet(schema, initBucketExtractors(response), mask, response, - query.sortingColumns().isEmpty() ? query.limit() : -1, ba), + Supplier makeRowSet = isPivot ? + () -> new PivotRowSet(schema, initBucketExtractors(response), mask, response, + query.sortingColumns().isEmpty() ? query.limit() : -1, null) : + () -> new SchemaCompositeAggRowSet(schema, initBucketExtractors(response), mask, response, + query.sortingColumns().isEmpty() ? query.limit() : -1); + + BiFunction makeCursor = isPivot ? + (q, r) -> { + Map lastAfterKey = r instanceof PivotRowSet ? 
((PivotRowSet) r).lastAfterKey() : null; + return new PivotCursor(lastAfterKey, q, r.extractors(), r.mask(), r.remainingData(), query.shouldIncludeFrozen(), + request.indices()); + } : + (q, r) -> new CompositeAggCursor(q, r.extractors(), r.mask(), r.remainingData, query.shouldIncludeFrozen(), + request.indices()); + + CompositeAggCursor.handle(response, request.source(), + makeRowSet, + makeCursor, () -> client.search(request, this), - p -> listener.onResponse(p), - e -> listener.onFailure(e), - schema, query.shouldIncludeFrozen(), request.indices()); + listener, + schema); } } @@ -381,6 +404,11 @@ public class Querier { return new TopHitsAggExtractor(r.name(), r.fieldDataType(), cfg.zoneId()); } + if (ref instanceof PivotColumnRef) { + PivotColumnRef r = (PivotColumnRef) ref; + return new PivotExtractor(createExtractor(r.pivot(), totalCount), createExtractor(r.agg(), totalCount), r.value()); + } + if (ref == GlobalCountRef.INSTANCE) { return totalCount; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggsRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggRowSet.java similarity index 77% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggsRowSet.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggRowSet.java index 7eeb8b28f154..eb4d568f5570 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggsRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaCompositeAggRowSet.java @@ -18,12 +18,12 @@ import java.util.List; * Extension of the {@link RowSet} over a composite agg, extending it to provide its schema. * Used for the initial response. */ -class SchemaCompositeAggsRowSet extends CompositeAggsRowSet implements SchemaRowSet { +class SchemaCompositeAggRowSet extends CompositeAggRowSet implements SchemaRowSet { private final Schema schema; - SchemaCompositeAggsRowSet(Schema schema, List exts, BitSet mask, SearchResponse r, int limitAggs, byte[] next) { - super(exts, mask, r, limitAggs, next); + SchemaCompositeAggRowSet(Schema schema, List exts, BitSet mask, SearchResponse r, int limitAggs) { + super(exts, mask, r, limitAggs); this.schema = schema; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaDelegatingRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaDelegatingRowSet.java new file mode 100644 index 000000000000..ccfe1ad55f25 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaDelegatingRowSet.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.execution.search; + +import org.elasticsearch.xpack.sql.session.RowSet; +import org.elasticsearch.xpack.sql.session.SchemaRowSet; +import org.elasticsearch.xpack.sql.type.Schema; + +class SchemaDelegatingRowSet implements SchemaRowSet { + + private final Schema schema; + private final RowSet delegate; + + SchemaDelegatingRowSet(Schema schema, RowSet delegate) { + this.schema = schema; + this.delegate = delegate; + } + + @Override + public Schema schema() { + return schema; + } + + @Override + public boolean hasCurrentRow() { + return delegate.hasCurrentRow(); + } + + @Override + public boolean advanceRow() { + return delegate.advanceRow(); + } + + @Override + public int size() { + return delegate.size(); + } + + @Override + public void reset() { + delegate.reset(); + } + + @Override + public Object column(int index) { + return delegate.column(index); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java index 4e343c1e54f5..868dd2dcfffc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java @@ -76,6 +76,10 @@ public abstract class SourceGenerator { // set page size if (size != null) { int sz = container.limit() > 0 ? Math.min(container.limit(), size) : size; + // now take into account the minimum page size (if set) + // that is, return the largest multiple of the minimum page size that does not exceed the set size + int minSize = container.minPageSize(); + sz = minSize > 0 ? (Math.max(sz / minSize, 1) * minSize) : sz; if (source.size() == -1) { source.size(sz); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/BucketExtractors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/BucketExtractors.java index 221662b79c11..bcbbce8e4576 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/BucketExtractors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/BucketExtractors.java @@ -26,6 +26,7 @@ public final class BucketExtractors { entries.add(new Entry(BucketExtractor.class, MetricAggExtractor.NAME, MetricAggExtractor::new)); entries.add(new Entry(BucketExtractor.class, TopHitsAggExtractor.NAME, TopHitsAggExtractor::new)); entries.add(new Entry(BucketExtractor.class, ConstantExtractor.NAME, ConstantExtractor::new)); + entries.add(new Entry(BucketExtractor.class, PivotExtractor.NAME, PivotExtractor::new)); return entries; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java index d7609ebc8f9f..9f61775cedc2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java @@ -190,6 +190,13 @@ public class FieldHitExtractor implements HitExtractor { throw new SqlIllegalArgumentException("Cannot read geo_shape value [{}] (returned by [{}])", values, fieldName); } } + if (dataType == DataType.SHAPE) { + try { + return new
GeoShape(values); + } catch (IOException ex) { + throw new SqlIllegalArgumentException("Cannot read shape value [{}] (returned by [{}])", values, fieldName); + } + } if (values instanceof Map) { throw new SqlIllegalArgumentException("Objects (returned by [{}]) are not supported", fieldName); } @@ -198,7 +205,7 @@ public class FieldHitExtractor implements HitExtractor { return DateUtils.asDateTime(Long.parseLong(values.toString()), zoneId); } } - + // The Jackson json parser can generate for numerics - Integers, Longs, BigIntegers (if Long is not enough) // and BigDecimal (if Double is not enough) if (values instanceof Number || values instanceof String || values instanceof Boolean) { @@ -266,7 +273,7 @@ public class FieldHitExtractor implements HitExtractor { for (int i = idx + 1; i < path.length; i++) { sj.add(path[i]); Object node = subMap.get(sj.toString()); - + if (node instanceof List) { List listOfValues = (List) node; // we can only do this optimization until the last element of our pass since geo points are using arrays @@ -281,7 +288,7 @@ public class FieldHitExtractor implements HitExtractor { return unwrapMultiValue(node); } } - + if (node instanceof Map) { if (i < path.length - 1) { // Add the sub-map to the queue along with the current path index @@ -318,7 +325,7 @@ public class FieldHitExtractor implements HitExtractor { public String fieldName() { return fieldName; } - + public String fullFieldName() { return fullFieldName; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/PivotExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/PivotExtractor.java new file mode 100644 index 000000000000..e7c1b8dfa302 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/PivotExtractor.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
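
A worked example of the page-size rounding added to SourceGenerator above: the requested size is floored to a whole multiple of the container's minimum page size (for a pivot, one multiple corresponds to one full row of pivoted columns), but never below a single multiple. The expression mirrors the hunk; the concrete numbers are illustrative assumptions.

    // Worked example of the page-size rounding in the SourceGenerator hunk above.
    // The numbers are illustrative; only the expression mirrors the diff.
    public class PageSizeRounding {

        static int round(int sz, int minSize) {
            // largest multiple of minSize not exceeding sz, but at least one multiple
            return minSize > 0 ? (Math.max(sz / minSize, 1) * minSize) : sz;
        }

        public static void main(String[] args) {
            System.out.println(round(115, 20)); // 100 -> five full pivot rows
            System.out.println(round(7, 20));   // 20  -> always at least one full row
            System.out.println(round(115, 0));  // 115 -> no minimum configured
        }
    }
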
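The PivotExtractor introduced just below composes two bucket extractors: it emits the metric only when the bucket's group key equals the value this pivot column represents, and null otherwise, which is how a single composite bucket fans out into a row of mostly-empty pivot cells. A minimal sketch of that dispatch, with the aggregation bucket replaced by a hand-rolled stand-in rather than the real MultiBucketsAggregation.Bucket:

    import java.util.Objects;

    // Minimal sketch of PivotExtractor.extract: a pivoted column yields its
    // metric only for buckets whose group key matches the column's value.
    public class PivotExtractSketch {

        // stand-in for a composite-agg bucket: one group key, one metric value
        static class Bucket {
            final Object groupKey;
            final Object metric;
            Bucket(Object groupKey, Object metric) {
                this.groupKey = groupKey;
                this.metric = metric;
            }
        }

        static Object extract(Bucket bucket, Object pivotedValue) {
            return Objects.equals(pivotedValue, bucket.groupKey) ? bucket.metric : null;
        }

        public static void main(String[] args) {
            Bucket bucket = new Bucket("FR", 42);
            System.out.println(extract(bucket, "FR")); // 42   -> fills the 'FR' column
            System.out.println(extract(bucket, "DE")); // null -> 'DE' stays empty
        }
    }
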
+ */ + +package org.elasticsearch.xpack.sql.execution.search.extractor; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; + +import java.io.IOException; +import java.util.Objects; + +public class PivotExtractor implements BucketExtractor { + + static final String NAME = "pv"; + + private final BucketExtractor groupExtractor; + private final BucketExtractor metricExtractor; + private final Object value; + + public PivotExtractor(BucketExtractor groupExtractor, BucketExtractor metricExtractor, Object value) { + this.groupExtractor = groupExtractor; + this.metricExtractor = metricExtractor; + this.value = value; + } + + PivotExtractor(StreamInput in) throws IOException { + groupExtractor = in.readNamedWriteable(BucketExtractor.class); + metricExtractor = in.readNamedWriteable(BucketExtractor.class); + value = in.readGenericValue(); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeNamedWriteable(groupExtractor); + out.writeNamedWriteable(metricExtractor); + out.writeGenericValue(value); + } + + @Override + public Object extract(Bucket bucket) { + if (Objects.equals(value, groupExtractor.extract(bucket))) { + return metricExtractor.extract(bucket); + } + return null; + } + + @Override + public int hashCode() { + return Objects.hash(groupExtractor, metricExtractor, value); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + PivotExtractor other = (PivotExtractor) obj; + return Objects.equals(groupExtractor, other.groupExtractor) + && Objects.equals(metricExtractor, other.metricExtractor) + && Objects.equals(value, other.value); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java index f4c8526bf47b..4ebc030c281d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java @@ -108,7 +108,7 @@ public class Alias extends NamedExpression { Attribute attr = Expressions.attribute(c); if (attr != null) { - return attr.clone(source(), name(), qualifier, child.nullable(), id(), synthetic()); + return attr.clone(source(), name(), child.dataType(), qualifier, child.nullable(), id(), synthetic()); } else { // TODO: WE need to fix this fake Field diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java index 2f8b6633249d..9f6b54badaf2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Attribute.java @@ -9,6 +9,7 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.tree.Source; +import org.elasticsearch.xpack.sql.type.DataType; import java.util.List; import java.util.Objects; @@ -87,19 +88,33 @@ public abstract class Attribute extends NamedExpression { } public Attribute 
withLocation(Source source) { - return Objects.equals(source(), source) ? this : clone(source, name(), qualifier(), nullable(), id(), synthetic()); + return Objects.equals(source(), source) ? this : clone(source, name(), dataType(), qualifier(), nullable(), id(), synthetic()); } public Attribute withQualifier(String qualifier) { - return Objects.equals(qualifier(), qualifier) ? this : clone(source(), name(), qualifier, nullable(), id(), synthetic()); + return Objects.equals(qualifier(), qualifier) ? this : clone(source(), name(), dataType(), qualifier, nullable(), id(), + synthetic()); + } + + public Attribute withName(String name) { + return Objects.equals(name(), name) ? this : clone(source(), name, dataType(), qualifier(), nullable(), id(), synthetic()); } public Attribute withNullability(Nullability nullability) { - return Objects.equals(nullable(), nullability) ? this : clone(source(), name(), qualifier(), nullability, id(), synthetic()); + return Objects.equals(nullable(), nullability) ? this : clone(source(), name(), dataType(), qualifier(), nullability, id(), + synthetic()); } - protected abstract Attribute clone(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, - boolean synthetic); + public Attribute withDataType(DataType type) { + return Objects.equals(dataType(), type) ? this : clone(source(), name(), type, qualifier(), nullable(), id(), synthetic()); + } + + public Attribute withId(ExpressionId id) { + return clone(source(), name(), dataType(), qualifier(), nullable(), id, synthetic()); + } + + protected abstract Attribute clone(Source source, String name, DataType type, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic); @Override public Attribute toAttribute() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ExpressionId.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ExpressionId.java index 55f947a20ac7..cbc622a615cb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ExpressionId.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/ExpressionId.java @@ -25,6 +25,10 @@ public class ExpressionId { this.id = COUNTER.incrementAndGet(); } + public ExpressionId(long id) { + this.id = id; + } + @Override public int hashCode() { return Objects.hash(id); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index ca5e4b757567..0515d4f11b4e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -8,10 +8,13 @@ package org.elasticsearch.xpack.sql.expression; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; import java.util.ArrayList; import java.util.Collection; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import java.util.function.Predicate; import static java.util.Collections.emptyList; @@ -134,6 +137,30 @@ public final class Expressions { return true; } + public static List onlyPrimitiveFieldAttributes(Collection attributes) { + List filtered = new ArrayList<>(); + // add only primitives + // but filter 
out multi fields (allow only the top-level value) + Set seenMultiFields = new LinkedHashSet<>(); + + for (Attribute a : attributes) { + if (!DataTypes.isUnsupported(a.dataType()) && a.dataType().isPrimitive()) { + if (a instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) a; + // skip nested fields and seen multi-fields + if (!fa.isNested() && !seenMultiFields.contains(fa.parent())) { + filtered.add(a); + seenMultiFields.add(a); + } + } else { + filtered.add(a); + } + } + } + + return filtered; + } + public static Pipe pipe(Expression e) { if (e instanceof NamedExpression) { return ((NamedExpression) e).asPipe(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java index cb86e2742b2d..c0cd9a95eb68 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java @@ -36,10 +36,15 @@ public class FieldAttribute extends TypedAttribute { public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field) { this(source, parent, name, field, null, Nullability.TRUE, null, false); } + + public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, String qualifier, Nullability nullability, + ExpressionId id, boolean synthetic) { + this(source, parent, name, field.getDataType(), field, qualifier, nullability, id, synthetic); + } - public FieldAttribute(Source source, FieldAttribute parent, String name, EsField field, String qualifier, + public FieldAttribute(Source source, FieldAttribute parent, String name, DataType type, EsField field, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - super(source, name, field.getDataType(), qualifier, nullability, id, synthetic); + super(source, name, type, qualifier, nullability, id, synthetic); this.path = parent != null ? parent.name() : StringUtils.EMPTY; this.parent = parent; this.field = field; @@ -57,7 +62,7 @@ public class FieldAttribute extends TypedAttribute { @Override protected NodeInfo info() { - return NodeInfo.create(this, FieldAttribute::new, parent, name(), field, qualifier(), nullable(), id(), synthetic()); + return NodeInfo.create(this, FieldAttribute::new, parent, name(), dataType(), field, qualifier(), nullable(), id(), synthetic()); } public FieldAttribute parent() { @@ -103,8 +108,8 @@ public class FieldAttribute extends TypedAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, - ExpressionId id, boolean synthetic) { + protected Attribute clone(Source source, String name, DataType type, String qualifier, + Nullability nullability, ExpressionId id, boolean synthetic) { FieldAttribute qualifiedParent = parent != null ? 
(FieldAttribute) parent.withQualifier(qualifier) : null; return new FieldAttribute(source, qualifiedParent, name, field, qualifier, nullability, id, synthetic); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java index b4ccd7eb9ffd..b22483bda365 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Literal.java @@ -77,7 +77,7 @@ public class Literal extends NamedExpression { @Override public Attribute toAttribute() { - return new LiteralAttribute(source(), name(), null, nullable(), id(), false, dataType, this); + return new LiteralAttribute(source(), name(), dataType, null, nullable(), id(), false, this); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java index 1305240b6098..506f3f8a0732 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/LiteralAttribute.java @@ -14,8 +14,8 @@ public class LiteralAttribute extends TypedAttribute { private final Literal literal; - public LiteralAttribute(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic, - DataType dataType, Literal literal) { + public LiteralAttribute(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, + boolean synthetic, Literal literal) { super(source, name, dataType, qualifier, nullability, id, synthetic); this.literal = literal; } @@ -23,13 +23,13 @@ public class LiteralAttribute extends TypedAttribute { @Override protected NodeInfo info() { return NodeInfo.create(this, LiteralAttribute::new, - name(), qualifier(), nullable(), id(), synthetic(), dataType(), literal); + name(), dataType(), qualifier(), nullable(), id(), synthetic(), literal); } @Override - protected LiteralAttribute clone(Source source, String name, String qualifier, Nullability nullability, + protected LiteralAttribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - return new LiteralAttribute(source, name, qualifier, nullability, id, synthetic, dataType(), literal); + return new LiteralAttribute(source, name, dataType, qualifier, nullability, id, synthetic, literal); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java index 476c69fea095..add7f702e04d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java @@ -65,7 +65,7 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { return this; } diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java index bcd0aab16c63..7d93db3d862f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/ScoreAttribute.java @@ -41,9 +41,9 @@ public class ScoreAttribute extends FunctionAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - return new ScoreAttribute(source, name, dataType(), qualifier, nullability, id, synthetic); + return new ScoreAttribute(source, name, dataType, qualifier, nullability, id, synthetic); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java index 177f598dc9a4..59b4f345a4a6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunction.java @@ -28,7 +28,7 @@ import static java.util.Collections.singletonList; public abstract class AggregateFunction extends Function { private final Expression field; - private final List<Expression> parameters; + private final List<? extends Expression> parameters; private AggregateFunctionAttribute lazyAttribute; @@ -36,7 +36,7 @@ public abstract class AggregateFunction extends Function { this(source, field, emptyList()); } - protected AggregateFunction(Source source, Expression field, List<Expression> parameters) { + protected AggregateFunction(Source source, Expression field, List<? extends Expression> parameters) { super(source, CollectionUtils.combine(singletonList(field), parameters)); this.field = field; this.parameters = parameters; } @@ -46,7 +46,7 @@ public abstract class AggregateFunction extends Function { return field; } - public List<Expression> parameters() { + public List<? extends Expression> parameters() { return parameters; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java index 96f072acda56..0bd0c9199bcb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/AggregateFunctionAttribute.java @@ -60,10 +60,11 @@ } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, + boolean synthetic) { // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) // that is the functionId is actually derived from the expression id to easily track it across contexts - return new
AggregateFunctionAttribute(source, name, dataType(), qualifier, nullability, id, synthetic, functionId(), innerId, + return new AggregateFunctionAttribute(source, name, dataType, qualifier, nullability, id, synthetic, functionId(), innerId, propertyPath); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java index c33c893141b6..2fed4cf30608 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/GroupingFunctionAttribute.java @@ -37,11 +37,11 @@ public class GroupingFunctionAttribute extends FunctionAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl) // that is the functionId is actually derived from the expression id to easily track it across contexts - return new GroupingFunctionAttribute(source, name, dataType(), qualifier, nullability, id, synthetic, functionId()); + return new GroupingFunctionAttribute(source, name, dataType, qualifier, nullability, id, synthetic, functionId()); } public GroupingFunctionAttribute withFunctionId(String functionId, String propertyPath) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java index 6a0980c2690d..67324ba466ca 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunctionAttribute.java @@ -66,9 +66,9 @@ public class ScalarFunctionAttribute extends FunctionAttribute { } @Override - protected Attribute clone(Source source, String name, String qualifier, Nullability nullability, + protected Attribute clone(Source source, String name, DataType dataType, String qualifier, Nullability nullability, ExpressionId id, boolean synthetic) { - return new ScalarFunctionAttribute(source, name, dataType(), qualifier, nullability, + return new ScalarFunctionAttribute(source, name, dataType, qualifier, nullability, id, synthetic, functionId(), script, orderBy, pipe); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java index 3006a08fad20..a43ffe745d26 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/GeoShape.java @@ -65,7 +65,7 @@ public class GeoShape implements ToXContentFragment, NamedWriteable { try { shape = parse(value); } catch (ParseException ex) { - throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape value", ex); + 
throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); } } @@ -74,7 +74,7 @@ public class GeoShape implements ToXContentFragment, NamedWriteable { try { shape = parse(value); } catch (ParseException ex) { - throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape value", ex); + throw new SqlIllegalArgumentException("Cannot parse [" + value + "] as a geo_shape or shape value", ex); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index 6689a33b1627..e702c4ecdbb4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.Nullability; import org.elasticsearch.xpack.sql.expression.Order; +import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.Functions; @@ -72,6 +73,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.UnaryPlan; @@ -96,6 +98,7 @@ import java.util.Map.Entry; import java.util.Set; import java.util.function.Consumer; +import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.expression.Expressions.equalsAsAttribute; import static org.elasticsearch.xpack.sql.expression.Literal.FALSE; import static org.elasticsearch.xpack.sql.expression.Literal.TRUE; @@ -120,6 +123,9 @@ public class Optimizer extends RuleExecutor { @Override protected Iterable.Batch> batches() { + Batch pivot = new Batch("Pivot Rewrite", Limiter.ONCE, + new RewritePivot()); + Batch operators = new Batch("Operator Optimization", new PruneDuplicatesInGroupBy(), // combining @@ -170,9 +176,40 @@ public class Optimizer extends RuleExecutor { CleanAliases.INSTANCE, new SetAsOptimized()); - return Arrays.asList(operators, aggregate, local, label); + return Arrays.asList(pivot, operators, aggregate, local, label); } + static class RewritePivot extends OptimizerRule { + + @Override + protected LogicalPlan rule(Pivot plan) { + // 1. 
add the IN filter + List rawValues = new ArrayList<>(plan.values().size()); + for (NamedExpression namedExpression : plan.values()) { + // everything should have resolved to an alias + if (namedExpression instanceof Alias) { + rawValues.add(((Alias) namedExpression).child()); + } + // TODO: this should be removed when refactoring NamedExpression + else if (namedExpression instanceof Literal) { + rawValues.add(namedExpression); + } + // TODO: NamedExpression refactoring should remove this + else if (namedExpression.foldable()) { + rawValues.add(Literal.of(namedExpression.name(), namedExpression)); + } + // TODO: same as above + else { + UnresolvedAttribute attr = new UnresolvedAttribute(namedExpression.source(), namedExpression.name(), null, + "Unexpected alias"); + return new Pivot(plan.source(), plan.child(), plan.column(), singletonList(attr), plan.aggregates()); + } + } + Filter filter = new Filter(plan.source(), plan.child(), new In(plan.source(), plan.column(), rawValues)); + // 2. preserve the PIVOT + return new Pivot(plan.source(), filter, plan.column(), plan.values(), plan.aggregates()); + } + } static class PruneDuplicatesInGroupBy extends OptimizerRule { @@ -1038,7 +1075,14 @@ public class Optimizer extends RuleExecutor { Aggregate a = (Aggregate) child; return new Aggregate(a.source(), a.child(), a.groupings(), combineProjections(project.projections(), a.aggregates())); } - + // if the pivot custom columns are not used, convert the project + pivot into a GROUP BY/Aggregate + if (child instanceof Pivot) { + Pivot p = (Pivot) child; + if (project.outputSet().subsetOf(p.groupingSet())) { + return new Aggregate(p.source(), p.child(), new ArrayList<>(project.projections()), project.projections()); + } + } + // TODO: add rule for combining Agg/Pivot with underlying project return project; } @@ -1172,7 +1216,7 @@ public class Optimizer extends RuleExecutor { return Literal.of(in, null); } - } else if (e instanceof Alias == false + } else if (e instanceof Alias == false && e.nullable() == Nullability.TRUE && Expressions.anyMatch(e.children(), Expressions::isNull)) { return Literal.of(e, null); @@ -1976,7 +2020,8 @@ public class Optimizer extends RuleExecutor { } } else if (n.foldable()) { values.add(n.fold()); - } else { + } + else { // not everything is foldable, bail-out early return values; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index 429e572878f5..9a663994ccf1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -8,11 +8,13 @@ package org.elasticsearch.xpack.sql.parser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.TerminalNode; +import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.UnresolvedAlias; +import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.AliasedQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.AliasedRelationContext; import 
org.elasticsearch.xpack.sql.parser.SqlBaseParser.FromClauseContext; @@ -22,7 +24,10 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedValueExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.OrderByContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PivotArgsContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PivotClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryNoWithContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QuerySpecificationContext; @@ -39,20 +44,22 @@ import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.logical.OrderBy; +import org.elasticsearch.xpack.sql.plan.logical.Pivot; import org.elasticsearch.xpack.sql.plan.logical.Project; import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias; import org.elasticsearch.xpack.sql.plan.logical.UnresolvedRelation; import org.elasticsearch.xpack.sql.plan.logical.With; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.session.SingletonExecutable; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataType; +import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import static java.util.Collections.emptyList; -import static java.util.stream.Collectors.toList; abstract class LogicalPlanBuilder extends ExpressionBuilder { @@ -119,14 +126,8 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder { query = new Filter(source(ctx), query, expression(ctx.where)); } - List selectTarget = emptyList(); - - // SELECT a, b, c ... - if (!ctx.selectItem().isEmpty()) { - selectTarget = expressions(ctx.selectItem()).stream() - .map(e -> (e instanceof NamedExpression) ? (NamedExpression) e : new UnresolvedAlias(e.source(), e)) - .collect(toList()); - } + List selectTarget = ctx.selectItems().isEmpty() ? 
emptyList() : visitList(ctx.selectItems().selectItem(), + NamedExpression.class); // GROUP BY GroupByContext groupByCtx = ctx.groupBy(); @@ -142,7 +143,7 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder { query = new Aggregate(source(ctx.GROUP(), endSource), query, groupBy, selectTarget); } else if (!selectTarget.isEmpty()) { - query = new Project(source(ctx.selectItem(0)), query, selectTarget); + query = new Project(source(ctx.selectItems()), query, selectTarget); } // HAVING @@ -160,9 +161,37 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder { public LogicalPlan visitFromClause(FromClauseContext ctx) { // if there are multiple FROM clauses, convert each pair in a inner join List plans = plans(ctx.relation()); - return plans.stream() + LogicalPlan plan = plans.stream() .reduce((left, right) -> new Join(source(ctx), left, right, Join.JoinType.IMPLICIT, null)) .get(); + + // PIVOT + if (ctx.pivotClause() != null) { + PivotClauseContext pivotClause = ctx.pivotClause(); + UnresolvedAttribute column = new UnresolvedAttribute(source(pivotClause.column), visitQualifiedName(pivotClause.column)); + List values = namedValues(pivotClause.aggs); + if (values.size() > 1) { + throw new ParsingException(source(pivotClause.aggs), "PIVOT currently supports only one aggregation, found [{}]", + values.size()); + } + plan = new Pivot(source(pivotClause), plan, column, namedValues(pivotClause.vals), namedValues(pivotClause.aggs)); + } + return plan; + } + + private List namedValues(PivotArgsContext args) { + if (args == null || args.isEmpty()) { + return emptyList(); + } + List values = new ArrayList<>(); + + for (NamedValueExpressionContext value : args.namedValueExpression()) { + Expression exp = expression(value.valueExpression()); + String alias = visitIdentifier(value.identifier()); + Source source = source(value); + values.add(alias != null ? new Alias(source, alias, exp) : new UnresolvedAlias(source, exp)); + } + return values; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java index 9e8dd6cd6af4..15531de70364 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java @@ -311,6 +311,18 @@ class SqlBaseBaseListener implements SqlBaseListener { *

    The default implementation does nothing.

    */ @Override public void exitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterSelectItems(SqlBaseParser.SelectItemsContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitSelectItems(SqlBaseParser.SelectItemsContext ctx) { } /** * {@inheritDoc} * @@ -407,6 +419,42 @@ class SqlBaseBaseListener implements SqlBaseListener { *

    The default implementation does nothing.

    */ @Override public void exitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterPivotClause(SqlBaseParser.PivotClauseContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitPivotClause(SqlBaseParser.PivotClauseContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterPivotArgs(SqlBaseParser.PivotArgsContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitPivotArgs(SqlBaseParser.PivotArgsContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index 199fb407698d..dc05e66c1e74 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -186,6 +186,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitSelectItems(SqlBaseParser.SelectItemsContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -242,6 +249,27 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitPivotClause(SqlBaseParser.PivotClauseContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitPivotArgs(SqlBaseParser.PivotArgsContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index de8afac15268..cba3c1ee9a39 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -22,21 +22,22 @@ class SqlBaseLexer extends Lexer { COLUMNS=18, CONVERT=19, CURRENT_DATE=20, CURRENT_TIME=21, CURRENT_TIMESTAMP=22, DAY=23, DAYS=24, DEBUG=25, DESC=26, DESCRIBE=27, DISTINCT=28, ELSE=29, END=30, ESCAPE=31, EXECUTABLE=32, EXISTS=33, EXPLAIN=34, EXTRACT=35, FALSE=36, - FIRST=37, FORMAT=38, FROM=39, FROZEN=40, FULL=41, FUNCTIONS=42, GRAPHVIZ=43, - GROUP=44, HAVING=45, HOUR=46, HOURS=47, IN=48, INCLUDE=49, INNER=50, INTERVAL=51, - IS=52, JOIN=53, LAST=54, LEFT=55, LIKE=56, LIMIT=57, MAPPED=58, MATCH=59, - MINUTE=60, MINUTES=61, MONTH=62, MONTHS=63, NATURAL=64, NOT=65, NULL=66, - NULLS=67, ON=68, OPTIMIZED=69, OR=70, ORDER=71, OUTER=72, PARSED=73, PHYSICAL=74, - PLAN=75, RIGHT=76, RLIKE=77, QUERY=78, SCHEMAS=79, SECOND=80, SECONDS=81, - SELECT=82, SHOW=83, SYS=84, TABLE=85, TABLES=86, TEXT=87, THEN=88, TRUE=89, - TO=90, TYPE=91, TYPES=92, USING=93, VERIFY=94, WHEN=95, WHERE=96, WITH=97, - YEAR=98, YEARS=99, ESCAPE_ESC=100, FUNCTION_ESC=101, LIMIT_ESC=102, DATE_ESC=103, - TIME_ESC=104, TIMESTAMP_ESC=105, GUID_ESC=106, ESC_END=107, EQ=108, NULLEQ=109, - NEQ=110, LT=111, LTE=112, GT=113, GTE=114, PLUS=115, MINUS=116, ASTERISK=117, - SLASH=118, PERCENT=119, CAST_OP=120, CONCAT=121, DOT=122, PARAM=123, STRING=124, - INTEGER_VALUE=125, DECIMAL_VALUE=126, IDENTIFIER=127, DIGIT_IDENTIFIER=128, - TABLE_IDENTIFIER=129, QUOTED_IDENTIFIER=130, BACKQUOTED_IDENTIFIER=131, - SIMPLE_COMMENT=132, BRACKETED_COMMENT=133, WS=134, UNRECOGNIZED=135; + FIRST=37, FOR=38, FORMAT=39, FROM=40, FROZEN=41, FULL=42, FUNCTIONS=43, + GRAPHVIZ=44, GROUP=45, HAVING=46, HOUR=47, HOURS=48, IN=49, INCLUDE=50, + INNER=51, INTERVAL=52, IS=53, JOIN=54, LAST=55, LEFT=56, LIKE=57, LIMIT=58, + MAPPED=59, MATCH=60, MINUTE=61, MINUTES=62, MONTH=63, MONTHS=64, NATURAL=65, + NOT=66, NULL=67, NULLS=68, ON=69, OPTIMIZED=70, OR=71, ORDER=72, OUTER=73, + PARSED=74, PHYSICAL=75, PIVOT=76, PLAN=77, RIGHT=78, RLIKE=79, QUERY=80, + SCHEMAS=81, SECOND=82, SECONDS=83, SELECT=84, SHOW=85, SYS=86, TABLE=87, + TABLES=88, TEXT=89, THEN=90, TRUE=91, TO=92, TYPE=93, TYPES=94, USING=95, + VERIFY=96, WHEN=97, WHERE=98, WITH=99, YEAR=100, YEARS=101, ESCAPE_ESC=102, + FUNCTION_ESC=103, LIMIT_ESC=104, DATE_ESC=105, TIME_ESC=106, TIMESTAMP_ESC=107, + GUID_ESC=108, ESC_END=109, EQ=110, NULLEQ=111, NEQ=112, LT=113, LTE=114, + GT=115, GTE=116, PLUS=117, MINUS=118, ASTERISK=119, SLASH=120, PERCENT=121, + CAST_OP=122, CONCAT=123, DOT=124, PARAM=125, STRING=126, INTEGER_VALUE=127, + DECIMAL_VALUE=128, IDENTIFIER=129, DIGIT_IDENTIFIER=130, TABLE_IDENTIFIER=131, + QUOTED_IDENTIFIER=132, BACKQUOTED_IDENTIFIER=133, SIMPLE_COMMENT=134, + BRACKETED_COMMENT=135, WS=136, UNRECOGNIZED=137; public static String[] modeNames = { "DEFAULT_MODE" }; @@ -46,21 +47,22 @@ class SqlBaseLexer extends Lexer { "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", 
"END", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", - "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", - "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", - "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", - "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", - "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", - "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", - "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", - "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", - "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", - "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", - "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", - "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", + "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", + "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", + "WS", "UNRECOGNIZED" }; private static final String[] _LITERAL_NAMES = { @@ -69,40 +71,40 @@ class SqlBaseLexer extends Lexer { "'CATALOG'", "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT_DATE'", "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", "'EXECUTABLE'", - "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FORMAT'", + "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FOR'", "'FORMAT'", "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", "'INTERVAL'", "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", - "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", - "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", - "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", - "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", - "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", "'{TS'", "'{GUID'", - "'}'", 
"'='", "'<=>'", null, "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", - "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" + "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PIVOT'", "'PLAN'", "'RIGHT'", "'RLIKE'", + "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", + "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", + "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", + "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", + "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", + "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", - "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", - "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", - "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", - "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", - "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", - "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", - "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", - "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", - "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", - "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", - "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", - "WS", "UNRECOGNIZED" + "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", + "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", + "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -159,7 +161,7 @@ class SqlBaseLexer extends Lexer { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\u0089\u0471\b\1\4"+ + 
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\u008b\u047f\b\1\4"+ "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -175,384 +177,391 @@ class SqlBaseLexer extends Lexer { "\4w\tw\4x\tx\4y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t"+ "\u0080\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083\4\u0084\t\u0084"+ "\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087\t\u0087\4\u0088\t\u0088\4\u0089"+ - "\t\u0089\4\u008a\t\u008a\4\u008b\t\u008b\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3"+ - "\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b"+ - "\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3"+ - "\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3"+ - "\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3"+ - "\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3"+ - "\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3"+ - "\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3"+ - "\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3"+ - "\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3"+ - "\27\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3"+ - "\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3"+ - "\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3"+ - "\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3"+ - "!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3#\3$\3"+ - "$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3"+ - "\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3+\3"+ - "+\3+\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3"+ - "-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60\3\60\3\60\3\60\3\60\3"+ - "\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3"+ - "\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3"+ - "\65\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\38\38\38\38\38\3"+ - "9\39\39\39\39\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3"+ - "<\3=\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3?\3?\3?\3@\3"+ - "@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3A\3A\3B\3B\3B\3B\3C\3C\3C\3C\3C\3"+ - "D\3D\3D\3D\3D\3D\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3F\3F\3F\3G\3G\3G\3H\3"+ - "H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3"+ - "K\3K\3K\3K\3L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3M\3N\3N\3N\3N\3N\3N\3O\3O\3"+ - "O\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3"+ - "R\3R\3R\3R\3S\3S\3S\3S\3S\3S\3S\3T\3T\3T\3T\3T\3U\3U\3U\3U\3V\3V\3V\3"+ - "V\3V\3V\3W\3W\3W\3W\3W\3W\3W\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Y\3Y\3Z\3Z\3Z\3"+ - "Z\3Z\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3]\3]\3]\3]\3]\3]\3^\3^\3^\3^\3^\3^"+ - "\3_\3_\3_\3_\3_\3_\3_\3`\3`\3`\3`\3`\3a\3a\3a\3a\3a\3a\3b\3b\3b\3b\3b"+ - "\3c\3c\3c\3c\3c\3d\3d\3d\3d\3d\3d\3e\3e\3e\3e\3e\3e\3e\3e\3f\3f\3f\3f"+ - "\3g\3g\3g\3g\3g\3g\3g\3h\3h\3h\3i\3i\3i\3j\3j\3j\3j\3k\3k\3k\3k\3k\3k"+ - "\3l\3l\3m\3m\3n\3n\3n\3n\3o\3o\3o\3o\5o\u03af\no\3p\3p\3q\3q\3q\3r\3r"+ - "\3s\3s\3s\3t\3t\3u\3u\3v\3v\3w\3w\3x\3x\3y\3y\3y\3z\3z\3z\3{\3{\3|\3|"+ - "\3}\3}\3}\3}\7}\u03d3\n}\f}\16}\u03d6\13}\3}\3}\3~\6~\u03db\n~\r~\16~"+ - "\u03dc\3\177\6\177\u03e0\n\177\r\177\16\177\u03e1\3\177\3\177\7\177\u03e6"+ - 
"\n\177\f\177\16\177\u03e9\13\177\3\177\3\177\6\177\u03ed\n\177\r\177\16"+ - "\177\u03ee\3\177\6\177\u03f2\n\177\r\177\16\177\u03f3\3\177\3\177\7\177"+ - "\u03f8\n\177\f\177\16\177\u03fb\13\177\5\177\u03fd\n\177\3\177\3\177\3"+ - "\177\3\177\6\177\u0403\n\177\r\177\16\177\u0404\3\177\3\177\5\177\u0409"+ - "\n\177\3\u0080\3\u0080\5\u0080\u040d\n\u0080\3\u0080\3\u0080\3\u0080\7"+ - "\u0080\u0412\n\u0080\f\u0080\16\u0080\u0415\13\u0080\3\u0081\3\u0081\3"+ - "\u0081\3\u0081\6\u0081\u041b\n\u0081\r\u0081\16\u0081\u041c\3\u0082\3"+ - "\u0082\3\u0082\6\u0082\u0422\n\u0082\r\u0082\16\u0082\u0423\3\u0083\3"+ - "\u0083\3\u0083\3\u0083\7\u0083\u042a\n\u0083\f\u0083\16\u0083\u042d\13"+ - "\u0083\3\u0083\3\u0083\3\u0084\3\u0084\3\u0084\3\u0084\7\u0084\u0435\n"+ - "\u0084\f\u0084\16\u0084\u0438\13\u0084\3\u0084\3\u0084\3\u0085\3\u0085"+ - "\5\u0085\u043e\n\u0085\3\u0085\6\u0085\u0441\n\u0085\r\u0085\16\u0085"+ - "\u0442\3\u0086\3\u0086\3\u0087\3\u0087\3\u0088\3\u0088\3\u0088\3\u0088"+ - "\7\u0088\u044d\n\u0088\f\u0088\16\u0088\u0450\13\u0088\3\u0088\5\u0088"+ - "\u0453\n\u0088\3\u0088\5\u0088\u0456\n\u0088\3\u0088\3\u0088\3\u0089\3"+ - "\u0089\3\u0089\3\u0089\3\u0089\7\u0089\u045f\n\u0089\f\u0089\16\u0089"+ - "\u0462\13\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u008a\6\u008a"+ - "\u046a\n\u008a\r\u008a\16\u008a\u046b\3\u008a\3\u008a\3\u008b\3\u008b"+ - "\3\u0460\2\u008c\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31"+ - "\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65"+ - "\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64"+ - "g\65i\66k\67m8o9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089"+ - "F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009d"+ - "P\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1"+ - "Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5"+ - "d\u00c7e\u00c9f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5l\u00d7m\u00d9"+ - "n\u00dbo\u00ddp\u00dfq\u00e1r\u00e3s\u00e5t\u00e7u\u00e9v\u00ebw\u00ed"+ - "x\u00efy\u00f1z\u00f3{\u00f5|\u00f7}\u00f9~\u00fb\177\u00fd\u0080\u00ff"+ - "\u0081\u0101\u0082\u0103\u0083\u0105\u0084\u0107\u0085\u0109\2\u010b\2"+ - "\u010d\2\u010f\u0086\u0111\u0087\u0113\u0088\u0115\u0089\3\2\13\3\2))"+ - "\4\2BBaa\3\2$$\3\2bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17"+ - "\17\"\"\u0491\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2"+ - "\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2"+ - "\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2"+ - "\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2"+ - "\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3"+ - "\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2"+ - "\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2"+ - "S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3"+ - "\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2"+ - "\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2"+ - "y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083"+ - "\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2"+ - "\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2\2\2\u0095"+ - "\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2"+ - "\2\2\u009f\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2\2\2\u00a7"+ - 
"\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af\3\2\2"+ - "\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9"+ - "\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2"+ - "\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb"+ - "\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2\2\2\u00d1\3\2\2\2\2\u00d3\3\2\2"+ - "\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2\2\2\u00db\3\2\2\2\2\u00dd"+ - "\3\2\2\2\2\u00df\3\2\2\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\2\u00e5\3\2\2"+ - "\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2\2\2\u00ed\3\2\2\2\2\u00ef"+ - "\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2\2\2\u00f5\3\2\2\2\2\u00f7\3\2\2"+ - "\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd\3\2\2\2\2\u00ff\3\2\2\2\2\u0101"+ - "\3\2\2\2\2\u0103\3\2\2\2\2\u0105\3\2\2\2\2\u0107\3\2\2\2\2\u010f\3\2\2"+ - "\2\2\u0111\3\2\2\2\2\u0113\3\2\2\2\2\u0115\3\2\2\2\3\u0117\3\2\2\2\5\u0119"+ - "\3\2\2\2\7\u011b\3\2\2\2\t\u011d\3\2\2\2\13\u011f\3\2\2\2\r\u0123\3\2"+ - "\2\2\17\u012b\3\2\2\2\21\u0134\3\2\2\2\23\u0138\3\2\2\2\25\u013c\3\2\2"+ - "\2\27\u013f\3\2\2\2\31\u0143\3\2\2\2\33\u014b\3\2\2\2\35\u014e\3\2\2\2"+ - "\37\u0153\3\2\2\2!\u0158\3\2\2\2#\u0160\3\2\2\2%\u0169\3\2\2\2\'\u0171"+ - "\3\2\2\2)\u0179\3\2\2\2+\u0186\3\2\2\2-\u0193\3\2\2\2/\u01a5\3\2\2\2\61"+ - "\u01a9\3\2\2\2\63\u01ae\3\2\2\2\65\u01b4\3\2\2\2\67\u01b9\3\2\2\29\u01c2"+ - "\3\2\2\2;\u01cb\3\2\2\2=\u01d0\3\2\2\2?\u01d4\3\2\2\2A\u01db\3\2\2\2C"+ - "\u01e6\3\2\2\2E\u01ed\3\2\2\2G\u01f5\3\2\2\2I\u01fd\3\2\2\2K\u0203\3\2"+ - "\2\2M\u0209\3\2\2\2O\u0210\3\2\2\2Q\u0215\3\2\2\2S\u021c\3\2\2\2U\u0221"+ - "\3\2\2\2W\u022b\3\2\2\2Y\u0234\3\2\2\2[\u023a\3\2\2\2]\u0241\3\2\2\2_"+ - "\u0246\3\2\2\2a\u024c\3\2\2\2c\u024f\3\2\2\2e\u0257\3\2\2\2g\u025d\3\2"+ - "\2\2i\u0266\3\2\2\2k\u0269\3\2\2\2m\u026e\3\2\2\2o\u0273\3\2\2\2q\u0278"+ - "\3\2\2\2s\u027d\3\2\2\2u\u0283\3\2\2\2w\u028a\3\2\2\2y\u0290\3\2\2\2{"+ - "\u0297\3\2\2\2}\u029f\3\2\2\2\177\u02a5\3\2\2\2\u0081\u02ac\3\2\2\2\u0083"+ - "\u02b4\3\2\2\2\u0085\u02b8\3\2\2\2\u0087\u02bd\3\2\2\2\u0089\u02c3\3\2"+ - "\2\2\u008b\u02c6\3\2\2\2\u008d\u02d0\3\2\2\2\u008f\u02d3\3\2\2\2\u0091"+ - "\u02d9\3\2\2\2\u0093\u02df\3\2\2\2\u0095\u02e6\3\2\2\2\u0097\u02ef\3\2"+ - "\2\2\u0099\u02f4\3\2\2\2\u009b\u02fa\3\2\2\2\u009d\u0300\3\2\2\2\u009f"+ - "\u0306\3\2\2\2\u00a1\u030e\3\2\2\2\u00a3\u0315\3\2\2\2\u00a5\u031d\3\2"+ - "\2\2\u00a7\u0324\3\2\2\2\u00a9\u0329\3\2\2\2\u00ab\u032d\3\2\2\2\u00ad"+ - "\u0333\3\2\2\2\u00af\u033a\3\2\2\2\u00b1\u033f\3\2\2\2\u00b3\u0344\3\2"+ - "\2\2\u00b5\u0349\3\2\2\2\u00b7\u034c\3\2\2\2\u00b9\u0351\3\2\2\2\u00bb"+ - "\u0357\3\2\2\2\u00bd\u035d\3\2\2\2\u00bf\u0364\3\2\2\2\u00c1\u0369\3\2"+ - "\2\2\u00c3\u036f\3\2\2\2\u00c5\u0374\3\2\2\2\u00c7\u0379\3\2\2\2\u00c9"+ - "\u037f\3\2\2\2\u00cb\u0387\3\2\2\2\u00cd\u038b\3\2\2\2\u00cf\u0392\3\2"+ - "\2\2\u00d1\u0395\3\2\2\2\u00d3\u0398\3\2\2\2\u00d5\u039c\3\2\2\2\u00d7"+ - "\u03a2\3\2\2\2\u00d9\u03a4\3\2\2\2\u00db\u03a6\3\2\2\2\u00dd\u03ae\3\2"+ - "\2\2\u00df\u03b0\3\2\2\2\u00e1\u03b2\3\2\2\2\u00e3\u03b5\3\2\2\2\u00e5"+ - "\u03b7\3\2\2\2\u00e7\u03ba\3\2\2\2\u00e9\u03bc\3\2\2\2\u00eb\u03be\3\2"+ - "\2\2\u00ed\u03c0\3\2\2\2\u00ef\u03c2\3\2\2\2\u00f1\u03c4\3\2\2\2\u00f3"+ - "\u03c7\3\2\2\2\u00f5\u03ca\3\2\2\2\u00f7\u03cc\3\2\2\2\u00f9\u03ce\3\2"+ - "\2\2\u00fb\u03da\3\2\2\2\u00fd\u0408\3\2\2\2\u00ff\u040c\3\2\2\2\u0101"+ - "\u0416\3\2\2\2\u0103\u0421\3\2\2\2\u0105\u0425\3\2\2\2\u0107\u0430\3\2"+ - "\2\2\u0109\u043b\3\2\2\2\u010b\u0444\3\2\2\2\u010d\u0446\3\2\2\2\u010f"+ - 
"\u0448\3\2\2\2\u0111\u0459\3\2\2\2\u0113\u0469\3\2\2\2\u0115\u046f\3\2"+ - "\2\2\u0117\u0118\7*\2\2\u0118\4\3\2\2\2\u0119\u011a\7+\2\2\u011a\6\3\2"+ - "\2\2\u011b\u011c\7.\2\2\u011c\b\3\2\2\2\u011d\u011e\7<\2\2\u011e\n\3\2"+ - "\2\2\u011f\u0120\7C\2\2\u0120\u0121\7N\2\2\u0121\u0122\7N\2\2\u0122\f"+ - "\3\2\2\2\u0123\u0124\7C\2\2\u0124\u0125\7P\2\2\u0125\u0126\7C\2\2\u0126"+ - "\u0127\7N\2\2\u0127\u0128\7[\2\2\u0128\u0129\7\\\2\2\u0129\u012a\7G\2"+ - "\2\u012a\16\3\2\2\2\u012b\u012c\7C\2\2\u012c\u012d\7P\2\2\u012d\u012e"+ - "\7C\2\2\u012e\u012f\7N\2\2\u012f\u0130\7[\2\2\u0130\u0131\7\\\2\2\u0131"+ - "\u0132\7G\2\2\u0132\u0133\7F\2\2\u0133\20\3\2\2\2\u0134\u0135\7C\2\2\u0135"+ - "\u0136\7P\2\2\u0136\u0137\7F\2\2\u0137\22\3\2\2\2\u0138\u0139\7C\2\2\u0139"+ - "\u013a\7P\2\2\u013a\u013b\7[\2\2\u013b\24\3\2\2\2\u013c\u013d\7C\2\2\u013d"+ - "\u013e\7U\2\2\u013e\26\3\2\2\2\u013f\u0140\7C\2\2\u0140\u0141\7U\2\2\u0141"+ - "\u0142\7E\2\2\u0142\30\3\2\2\2\u0143\u0144\7D\2\2\u0144\u0145\7G\2\2\u0145"+ - "\u0146\7V\2\2\u0146\u0147\7Y\2\2\u0147\u0148\7G\2\2\u0148\u0149\7G\2\2"+ - "\u0149\u014a\7P\2\2\u014a\32\3\2\2\2\u014b\u014c\7D\2\2\u014c\u014d\7"+ - "[\2\2\u014d\34\3\2\2\2\u014e\u014f\7E\2\2\u014f\u0150\7C\2\2\u0150\u0151"+ - "\7U\2\2\u0151\u0152\7G\2\2\u0152\36\3\2\2\2\u0153\u0154\7E\2\2\u0154\u0155"+ - "\7C\2\2\u0155\u0156\7U\2\2\u0156\u0157\7V\2\2\u0157 \3\2\2\2\u0158\u0159"+ - "\7E\2\2\u0159\u015a\7C\2\2\u015a\u015b\7V\2\2\u015b\u015c\7C\2\2\u015c"+ - "\u015d\7N\2\2\u015d\u015e\7Q\2\2\u015e\u015f\7I\2\2\u015f\"\3\2\2\2\u0160"+ - "\u0161\7E\2\2\u0161\u0162\7C\2\2\u0162\u0163\7V\2\2\u0163\u0164\7C\2\2"+ - "\u0164\u0165\7N\2\2\u0165\u0166\7Q\2\2\u0166\u0167\7I\2\2\u0167\u0168"+ - "\7U\2\2\u0168$\3\2\2\2\u0169\u016a\7E\2\2\u016a\u016b\7Q\2\2\u016b\u016c"+ - "\7N\2\2\u016c\u016d\7W\2\2\u016d\u016e\7O\2\2\u016e\u016f\7P\2\2\u016f"+ - "\u0170\7U\2\2\u0170&\3\2\2\2\u0171\u0172\7E\2\2\u0172\u0173\7Q\2\2\u0173"+ - "\u0174\7P\2\2\u0174\u0175\7X\2\2\u0175\u0176\7G\2\2\u0176\u0177\7T\2\2"+ - "\u0177\u0178\7V\2\2\u0178(\3\2\2\2\u0179\u017a\7E\2\2\u017a\u017b\7W\2"+ - "\2\u017b\u017c\7T\2\2\u017c\u017d\7T\2\2\u017d\u017e\7G\2\2\u017e\u017f"+ - "\7P\2\2\u017f\u0180\7V\2\2\u0180\u0181\7a\2\2\u0181\u0182\7F\2\2\u0182"+ - "\u0183\7C\2\2\u0183\u0184\7V\2\2\u0184\u0185\7G\2\2\u0185*\3\2\2\2\u0186"+ - "\u0187\7E\2\2\u0187\u0188\7W\2\2\u0188\u0189\7T\2\2\u0189\u018a\7T\2\2"+ - "\u018a\u018b\7G\2\2\u018b\u018c\7P\2\2\u018c\u018d\7V\2\2\u018d\u018e"+ - "\7a\2\2\u018e\u018f\7V\2\2\u018f\u0190\7K\2\2\u0190\u0191\7O\2\2\u0191"+ - "\u0192\7G\2\2\u0192,\3\2\2\2\u0193\u0194\7E\2\2\u0194\u0195\7W\2\2\u0195"+ - "\u0196\7T\2\2\u0196\u0197\7T\2\2\u0197\u0198\7G\2\2\u0198\u0199\7P\2\2"+ - "\u0199\u019a\7V\2\2\u019a\u019b\7a\2\2\u019b\u019c\7V\2\2\u019c\u019d"+ - "\7K\2\2\u019d\u019e\7O\2\2\u019e\u019f\7G\2\2\u019f\u01a0\7U\2\2\u01a0"+ - "\u01a1\7V\2\2\u01a1\u01a2\7C\2\2\u01a2\u01a3\7O\2\2\u01a3\u01a4\7R\2\2"+ - "\u01a4.\3\2\2\2\u01a5\u01a6\7F\2\2\u01a6\u01a7\7C\2\2\u01a7\u01a8\7[\2"+ - "\2\u01a8\60\3\2\2\2\u01a9\u01aa\7F\2\2\u01aa\u01ab\7C\2\2\u01ab\u01ac"+ - "\7[\2\2\u01ac\u01ad\7U\2\2\u01ad\62\3\2\2\2\u01ae\u01af\7F\2\2\u01af\u01b0"+ - "\7G\2\2\u01b0\u01b1\7D\2\2\u01b1\u01b2\7W\2\2\u01b2\u01b3\7I\2\2\u01b3"+ - "\64\3\2\2\2\u01b4\u01b5\7F\2\2\u01b5\u01b6\7G\2\2\u01b6\u01b7\7U\2\2\u01b7"+ - "\u01b8\7E\2\2\u01b8\66\3\2\2\2\u01b9\u01ba\7F\2\2\u01ba\u01bb\7G\2\2\u01bb"+ - "\u01bc\7U\2\2\u01bc\u01bd\7E\2\2\u01bd\u01be\7T\2\2\u01be\u01bf\7K\2\2"+ - "\u01bf\u01c0\7D\2\2\u01c0\u01c1\7G\2\2\u01c18\3\2\2\2\u01c2\u01c3\7F\2"+ - 
"\2\u01c3\u01c4\7K\2\2\u01c4\u01c5\7U\2\2\u01c5\u01c6\7V\2\2\u01c6\u01c7"+ - "\7K\2\2\u01c7\u01c8\7P\2\2\u01c8\u01c9\7E\2\2\u01c9\u01ca\7V\2\2\u01ca"+ - ":\3\2\2\2\u01cb\u01cc\7G\2\2\u01cc\u01cd\7N\2\2\u01cd\u01ce\7U\2\2\u01ce"+ - "\u01cf\7G\2\2\u01cf<\3\2\2\2\u01d0\u01d1\7G\2\2\u01d1\u01d2\7P\2\2\u01d2"+ - "\u01d3\7F\2\2\u01d3>\3\2\2\2\u01d4\u01d5\7G\2\2\u01d5\u01d6\7U\2\2\u01d6"+ - "\u01d7\7E\2\2\u01d7\u01d8\7C\2\2\u01d8\u01d9\7R\2\2\u01d9\u01da\7G\2\2"+ - "\u01da@\3\2\2\2\u01db\u01dc\7G\2\2\u01dc\u01dd\7Z\2\2\u01dd\u01de\7G\2"+ - "\2\u01de\u01df\7E\2\2\u01df\u01e0\7W\2\2\u01e0\u01e1\7V\2\2\u01e1\u01e2"+ - "\7C\2\2\u01e2\u01e3\7D\2\2\u01e3\u01e4\7N\2\2\u01e4\u01e5\7G\2\2\u01e5"+ - "B\3\2\2\2\u01e6\u01e7\7G\2\2\u01e7\u01e8\7Z\2\2\u01e8\u01e9\7K\2\2\u01e9"+ - "\u01ea\7U\2\2\u01ea\u01eb\7V\2\2\u01eb\u01ec\7U\2\2\u01ecD\3\2\2\2\u01ed"+ - "\u01ee\7G\2\2\u01ee\u01ef\7Z\2\2\u01ef\u01f0\7R\2\2\u01f0\u01f1\7N\2\2"+ - "\u01f1\u01f2\7C\2\2\u01f2\u01f3\7K\2\2\u01f3\u01f4\7P\2\2\u01f4F\3\2\2"+ - "\2\u01f5\u01f6\7G\2\2\u01f6\u01f7\7Z\2\2\u01f7\u01f8\7V\2\2\u01f8\u01f9"+ - "\7T\2\2\u01f9\u01fa\7C\2\2\u01fa\u01fb\7E\2\2\u01fb\u01fc\7V\2\2\u01fc"+ - "H\3\2\2\2\u01fd\u01fe\7H\2\2\u01fe\u01ff\7C\2\2\u01ff\u0200\7N\2\2\u0200"+ - "\u0201\7U\2\2\u0201\u0202\7G\2\2\u0202J\3\2\2\2\u0203\u0204\7H\2\2\u0204"+ - "\u0205\7K\2\2\u0205\u0206\7T\2\2\u0206\u0207\7U\2\2\u0207\u0208\7V\2\2"+ - "\u0208L\3\2\2\2\u0209\u020a\7H\2\2\u020a\u020b\7Q\2\2\u020b\u020c\7T\2"+ - "\2\u020c\u020d\7O\2\2\u020d\u020e\7C\2\2\u020e\u020f\7V\2\2\u020fN\3\2"+ - "\2\2\u0210\u0211\7H\2\2\u0211\u0212\7T\2\2\u0212\u0213\7Q\2\2\u0213\u0214"+ - "\7O\2\2\u0214P\3\2\2\2\u0215\u0216\7H\2\2\u0216\u0217\7T\2\2\u0217\u0218"+ - "\7Q\2\2\u0218\u0219\7\\\2\2\u0219\u021a\7G\2\2\u021a\u021b\7P\2\2\u021b"+ - "R\3\2\2\2\u021c\u021d\7H\2\2\u021d\u021e\7W\2\2\u021e\u021f\7N\2\2\u021f"+ - "\u0220\7N\2\2\u0220T\3\2\2\2\u0221\u0222\7H\2\2\u0222\u0223\7W\2\2\u0223"+ - "\u0224\7P\2\2\u0224\u0225\7E\2\2\u0225\u0226\7V\2\2\u0226\u0227\7K\2\2"+ - "\u0227\u0228\7Q\2\2\u0228\u0229\7P\2\2\u0229\u022a\7U\2\2\u022aV\3\2\2"+ - "\2\u022b\u022c\7I\2\2\u022c\u022d\7T\2\2\u022d\u022e\7C\2\2\u022e\u022f"+ - "\7R\2\2\u022f\u0230\7J\2\2\u0230\u0231\7X\2\2\u0231\u0232\7K\2\2\u0232"+ - "\u0233\7\\\2\2\u0233X\3\2\2\2\u0234\u0235\7I\2\2\u0235\u0236\7T\2\2\u0236"+ - "\u0237\7Q\2\2\u0237\u0238\7W\2\2\u0238\u0239\7R\2\2\u0239Z\3\2\2\2\u023a"+ - "\u023b\7J\2\2\u023b\u023c\7C\2\2\u023c\u023d\7X\2\2\u023d\u023e\7K\2\2"+ - "\u023e\u023f\7P\2\2\u023f\u0240\7I\2\2\u0240\\\3\2\2\2\u0241\u0242\7J"+ - "\2\2\u0242\u0243\7Q\2\2\u0243\u0244\7W\2\2\u0244\u0245\7T\2\2\u0245^\3"+ - "\2\2\2\u0246\u0247\7J\2\2\u0247\u0248\7Q\2\2\u0248\u0249\7W\2\2\u0249"+ - "\u024a\7T\2\2\u024a\u024b\7U\2\2\u024b`\3\2\2\2\u024c\u024d\7K\2\2\u024d"+ - "\u024e\7P\2\2\u024eb\3\2\2\2\u024f\u0250\7K\2\2\u0250\u0251\7P\2\2\u0251"+ - "\u0252\7E\2\2\u0252\u0253\7N\2\2\u0253\u0254\7W\2\2\u0254\u0255\7F\2\2"+ - "\u0255\u0256\7G\2\2\u0256d\3\2\2\2\u0257\u0258\7K\2\2\u0258\u0259\7P\2"+ - "\2\u0259\u025a\7P\2\2\u025a\u025b\7G\2\2\u025b\u025c\7T\2\2\u025cf\3\2"+ - "\2\2\u025d\u025e\7K\2\2\u025e\u025f\7P\2\2\u025f\u0260\7V\2\2\u0260\u0261"+ - "\7G\2\2\u0261\u0262\7T\2\2\u0262\u0263\7X\2\2\u0263\u0264\7C\2\2\u0264"+ - "\u0265\7N\2\2\u0265h\3\2\2\2\u0266\u0267\7K\2\2\u0267\u0268\7U\2\2\u0268"+ - "j\3\2\2\2\u0269\u026a\7L\2\2\u026a\u026b\7Q\2\2\u026b\u026c\7K\2\2\u026c"+ - "\u026d\7P\2\2\u026dl\3\2\2\2\u026e\u026f\7N\2\2\u026f\u0270\7C\2\2\u0270"+ - "\u0271\7U\2\2\u0271\u0272\7V\2\2\u0272n\3\2\2\2\u0273\u0274\7N\2\2\u0274"+ - 
"\u0275\7G\2\2\u0275\u0276\7H\2\2\u0276\u0277\7V\2\2\u0277p\3\2\2\2\u0278"+ - "\u0279\7N\2\2\u0279\u027a\7K\2\2\u027a\u027b\7M\2\2\u027b\u027c\7G\2\2"+ - "\u027cr\3\2\2\2\u027d\u027e\7N\2\2\u027e\u027f\7K\2\2\u027f\u0280\7O\2"+ - "\2\u0280\u0281\7K\2\2\u0281\u0282\7V\2\2\u0282t\3\2\2\2\u0283\u0284\7"+ - "O\2\2\u0284\u0285\7C\2\2\u0285\u0286\7R\2\2\u0286\u0287\7R\2\2\u0287\u0288"+ - "\7G\2\2\u0288\u0289\7F\2\2\u0289v\3\2\2\2\u028a\u028b\7O\2\2\u028b\u028c"+ - "\7C\2\2\u028c\u028d\7V\2\2\u028d\u028e\7E\2\2\u028e\u028f\7J\2\2\u028f"+ - "x\3\2\2\2\u0290\u0291\7O\2\2\u0291\u0292\7K\2\2\u0292\u0293\7P\2\2\u0293"+ - "\u0294\7W\2\2\u0294\u0295\7V\2\2\u0295\u0296\7G\2\2\u0296z\3\2\2\2\u0297"+ - "\u0298\7O\2\2\u0298\u0299\7K\2\2\u0299\u029a\7P\2\2\u029a\u029b\7W\2\2"+ - "\u029b\u029c\7V\2\2\u029c\u029d\7G\2\2\u029d\u029e\7U\2\2\u029e|\3\2\2"+ - "\2\u029f\u02a0\7O\2\2\u02a0\u02a1\7Q\2\2\u02a1\u02a2\7P\2\2\u02a2\u02a3"+ - "\7V\2\2\u02a3\u02a4\7J\2\2\u02a4~\3\2\2\2\u02a5\u02a6\7O\2\2\u02a6\u02a7"+ - "\7Q\2\2\u02a7\u02a8\7P\2\2\u02a8\u02a9\7V\2\2\u02a9\u02aa\7J\2\2\u02aa"+ - "\u02ab\7U\2\2\u02ab\u0080\3\2\2\2\u02ac\u02ad\7P\2\2\u02ad\u02ae\7C\2"+ - "\2\u02ae\u02af\7V\2\2\u02af\u02b0\7W\2\2\u02b0\u02b1\7T\2\2\u02b1\u02b2"+ - "\7C\2\2\u02b2\u02b3\7N\2\2\u02b3\u0082\3\2\2\2\u02b4\u02b5\7P\2\2\u02b5"+ - "\u02b6\7Q\2\2\u02b6\u02b7\7V\2\2\u02b7\u0084\3\2\2\2\u02b8\u02b9\7P\2"+ - "\2\u02b9\u02ba\7W\2\2\u02ba\u02bb\7N\2\2\u02bb\u02bc\7N\2\2\u02bc\u0086"+ - "\3\2\2\2\u02bd\u02be\7P\2\2\u02be\u02bf\7W\2\2\u02bf\u02c0\7N\2\2\u02c0"+ - "\u02c1\7N\2\2\u02c1\u02c2\7U\2\2\u02c2\u0088\3\2\2\2\u02c3\u02c4\7Q\2"+ - "\2\u02c4\u02c5\7P\2\2\u02c5\u008a\3\2\2\2\u02c6\u02c7\7Q\2\2\u02c7\u02c8"+ - "\7R\2\2\u02c8\u02c9\7V\2\2\u02c9\u02ca\7K\2\2\u02ca\u02cb\7O\2\2\u02cb"+ - "\u02cc\7K\2\2\u02cc\u02cd\7\\\2\2\u02cd\u02ce\7G\2\2\u02ce\u02cf\7F\2"+ - "\2\u02cf\u008c\3\2\2\2\u02d0\u02d1\7Q\2\2\u02d1\u02d2\7T\2\2\u02d2\u008e"+ - "\3\2\2\2\u02d3\u02d4\7Q\2\2\u02d4\u02d5\7T\2\2\u02d5\u02d6\7F\2\2\u02d6"+ - "\u02d7\7G\2\2\u02d7\u02d8\7T\2\2\u02d8\u0090\3\2\2\2\u02d9\u02da\7Q\2"+ - "\2\u02da\u02db\7W\2\2\u02db\u02dc\7V\2\2\u02dc\u02dd\7G\2\2\u02dd\u02de"+ - "\7T\2\2\u02de\u0092\3\2\2\2\u02df\u02e0\7R\2\2\u02e0\u02e1\7C\2\2\u02e1"+ - "\u02e2\7T\2\2\u02e2\u02e3\7U\2\2\u02e3\u02e4\7G\2\2\u02e4\u02e5\7F\2\2"+ - "\u02e5\u0094\3\2\2\2\u02e6\u02e7\7R\2\2\u02e7\u02e8\7J\2\2\u02e8\u02e9"+ - "\7[\2\2\u02e9\u02ea\7U\2\2\u02ea\u02eb\7K\2\2\u02eb\u02ec\7E\2\2\u02ec"+ - "\u02ed\7C\2\2\u02ed\u02ee\7N\2\2\u02ee\u0096\3\2\2\2\u02ef\u02f0\7R\2"+ - "\2\u02f0\u02f1\7N\2\2\u02f1\u02f2\7C\2\2\u02f2\u02f3\7P\2\2\u02f3\u0098"+ - "\3\2\2\2\u02f4\u02f5\7T\2\2\u02f5\u02f6\7K\2\2\u02f6\u02f7\7I\2\2\u02f7"+ - "\u02f8\7J\2\2\u02f8\u02f9\7V\2\2\u02f9\u009a\3\2\2\2\u02fa\u02fb\7T\2"+ - "\2\u02fb\u02fc\7N\2\2\u02fc\u02fd\7K\2\2\u02fd\u02fe\7M\2\2\u02fe\u02ff"+ - "\7G\2\2\u02ff\u009c\3\2\2\2\u0300\u0301\7S\2\2\u0301\u0302\7W\2\2\u0302"+ - "\u0303\7G\2\2\u0303\u0304\7T\2\2\u0304\u0305\7[\2\2\u0305\u009e\3\2\2"+ - "\2\u0306\u0307\7U\2\2\u0307\u0308\7E\2\2\u0308\u0309\7J\2\2\u0309\u030a"+ - "\7G\2\2\u030a\u030b\7O\2\2\u030b\u030c\7C\2\2\u030c\u030d\7U\2\2\u030d"+ - "\u00a0\3\2\2\2\u030e\u030f\7U\2\2\u030f\u0310\7G\2\2\u0310\u0311\7E\2"+ - "\2\u0311\u0312\7Q\2\2\u0312\u0313\7P\2\2\u0313\u0314\7F\2\2\u0314\u00a2"+ - "\3\2\2\2\u0315\u0316\7U\2\2\u0316\u0317\7G\2\2\u0317\u0318\7E\2\2\u0318"+ - "\u0319\7Q\2\2\u0319\u031a\7P\2\2\u031a\u031b\7F\2\2\u031b\u031c\7U\2\2"+ - "\u031c\u00a4\3\2\2\2\u031d\u031e\7U\2\2\u031e\u031f\7G\2\2\u031f\u0320"+ - 
"\7N\2\2\u0320\u0321\7G\2\2\u0321\u0322\7E\2\2\u0322\u0323\7V\2\2\u0323"+ - "\u00a6\3\2\2\2\u0324\u0325\7U\2\2\u0325\u0326\7J\2\2\u0326\u0327\7Q\2"+ - "\2\u0327\u0328\7Y\2\2\u0328\u00a8\3\2\2\2\u0329\u032a\7U\2\2\u032a\u032b"+ - "\7[\2\2\u032b\u032c\7U\2\2\u032c\u00aa\3\2\2\2\u032d\u032e\7V\2\2\u032e"+ - "\u032f\7C\2\2\u032f\u0330\7D\2\2\u0330\u0331\7N\2\2\u0331\u0332\7G\2\2"+ - "\u0332\u00ac\3\2\2\2\u0333\u0334\7V\2\2\u0334\u0335\7C\2\2\u0335\u0336"+ - "\7D\2\2\u0336\u0337\7N\2\2\u0337\u0338\7G\2\2\u0338\u0339\7U\2\2\u0339"+ - "\u00ae\3\2\2\2\u033a\u033b\7V\2\2\u033b\u033c\7G\2\2\u033c\u033d\7Z\2"+ - "\2\u033d\u033e\7V\2\2\u033e\u00b0\3\2\2\2\u033f\u0340\7V\2\2\u0340\u0341"+ - "\7J\2\2\u0341\u0342\7G\2\2\u0342\u0343\7P\2\2\u0343\u00b2\3\2\2\2\u0344"+ - "\u0345\7V\2\2\u0345\u0346\7T\2\2\u0346\u0347\7W\2\2\u0347\u0348\7G\2\2"+ - "\u0348\u00b4\3\2\2\2\u0349\u034a\7V\2\2\u034a\u034b\7Q\2\2\u034b\u00b6"+ - "\3\2\2\2\u034c\u034d\7V\2\2\u034d\u034e\7[\2\2\u034e\u034f\7R\2\2\u034f"+ - "\u0350\7G\2\2\u0350\u00b8\3\2\2\2\u0351\u0352\7V\2\2\u0352\u0353\7[\2"+ - "\2\u0353\u0354\7R\2\2\u0354\u0355\7G\2\2\u0355\u0356\7U\2\2\u0356\u00ba"+ - "\3\2\2\2\u0357\u0358\7W\2\2\u0358\u0359\7U\2\2\u0359\u035a\7K\2\2\u035a"+ - "\u035b\7P\2\2\u035b\u035c\7I\2\2\u035c\u00bc\3\2\2\2\u035d\u035e\7X\2"+ - "\2\u035e\u035f\7G\2\2\u035f\u0360\7T\2\2\u0360\u0361\7K\2\2\u0361\u0362"+ - "\7H\2\2\u0362\u0363\7[\2\2\u0363\u00be\3\2\2\2\u0364\u0365\7Y\2\2\u0365"+ - "\u0366\7J\2\2\u0366\u0367\7G\2\2\u0367\u0368\7P\2\2\u0368\u00c0\3\2\2"+ - "\2\u0369\u036a\7Y\2\2\u036a\u036b\7J\2\2\u036b\u036c\7G\2\2\u036c\u036d"+ - "\7T\2\2\u036d\u036e\7G\2\2\u036e\u00c2\3\2\2\2\u036f\u0370\7Y\2\2\u0370"+ - "\u0371\7K\2\2\u0371\u0372\7V\2\2\u0372\u0373\7J\2\2\u0373\u00c4\3\2\2"+ - "\2\u0374\u0375\7[\2\2\u0375\u0376\7G\2\2\u0376\u0377\7C\2\2\u0377\u0378"+ - "\7T\2\2\u0378\u00c6\3\2\2\2\u0379\u037a\7[\2\2\u037a\u037b\7G\2\2\u037b"+ - "\u037c\7C\2\2\u037c\u037d\7T\2\2\u037d\u037e\7U\2\2\u037e\u00c8\3\2\2"+ - "\2\u037f\u0380\7}\2\2\u0380\u0381\7G\2\2\u0381\u0382\7U\2\2\u0382\u0383"+ - "\7E\2\2\u0383\u0384\7C\2\2\u0384\u0385\7R\2\2\u0385\u0386\7G\2\2\u0386"+ - "\u00ca\3\2\2\2\u0387\u0388\7}\2\2\u0388\u0389\7H\2\2\u0389\u038a\7P\2"+ - "\2\u038a\u00cc\3\2\2\2\u038b\u038c\7}\2\2\u038c\u038d\7N\2\2\u038d\u038e"+ - "\7K\2\2\u038e\u038f\7O\2\2\u038f\u0390\7K\2\2\u0390\u0391\7V\2\2\u0391"+ - "\u00ce\3\2\2\2\u0392\u0393\7}\2\2\u0393\u0394\7F\2\2\u0394\u00d0\3\2\2"+ - "\2\u0395\u0396\7}\2\2\u0396\u0397\7V\2\2\u0397\u00d2\3\2\2\2\u0398\u0399"+ - "\7}\2\2\u0399\u039a\7V\2\2\u039a\u039b\7U\2\2\u039b\u00d4\3\2\2\2\u039c"+ - "\u039d\7}\2\2\u039d\u039e\7I\2\2\u039e\u039f\7W\2\2\u039f\u03a0\7K\2\2"+ - "\u03a0\u03a1\7F\2\2\u03a1\u00d6\3\2\2\2\u03a2\u03a3\7\177\2\2\u03a3\u00d8"+ - "\3\2\2\2\u03a4\u03a5\7?\2\2\u03a5\u00da\3\2\2\2\u03a6\u03a7\7>\2\2\u03a7"+ - "\u03a8\7?\2\2\u03a8\u03a9\7@\2\2\u03a9\u00dc\3\2\2\2\u03aa\u03ab\7>\2"+ - "\2\u03ab\u03af\7@\2\2\u03ac\u03ad\7#\2\2\u03ad\u03af\7?\2\2\u03ae\u03aa"+ - "\3\2\2\2\u03ae\u03ac\3\2\2\2\u03af\u00de\3\2\2\2\u03b0\u03b1\7>\2\2\u03b1"+ - "\u00e0\3\2\2\2\u03b2\u03b3\7>\2\2\u03b3\u03b4\7?\2\2\u03b4\u00e2\3\2\2"+ - "\2\u03b5\u03b6\7@\2\2\u03b6\u00e4\3\2\2\2\u03b7\u03b8\7@\2\2\u03b8\u03b9"+ - "\7?\2\2\u03b9\u00e6\3\2\2\2\u03ba\u03bb\7-\2\2\u03bb\u00e8\3\2\2\2\u03bc"+ - "\u03bd\7/\2\2\u03bd\u00ea\3\2\2\2\u03be\u03bf\7,\2\2\u03bf\u00ec\3\2\2"+ - "\2\u03c0\u03c1\7\61\2\2\u03c1\u00ee\3\2\2\2\u03c2\u03c3\7\'\2\2\u03c3"+ - "\u00f0\3\2\2\2\u03c4\u03c5\7<\2\2\u03c5\u03c6\7<\2\2\u03c6\u00f2\3\2\2"+ - 
"\2\u03c7\u03c8\7~\2\2\u03c8\u03c9\7~\2\2\u03c9\u00f4\3\2\2\2\u03ca\u03cb"+ - "\7\60\2\2\u03cb\u00f6\3\2\2\2\u03cc\u03cd\7A\2\2\u03cd\u00f8\3\2\2\2\u03ce"+ - "\u03d4\7)\2\2\u03cf\u03d3\n\2\2\2\u03d0\u03d1\7)\2\2\u03d1\u03d3\7)\2"+ - "\2\u03d2\u03cf\3\2\2\2\u03d2\u03d0\3\2\2\2\u03d3\u03d6\3\2\2\2\u03d4\u03d2"+ - "\3\2\2\2\u03d4\u03d5\3\2\2\2\u03d5\u03d7\3\2\2\2\u03d6\u03d4\3\2\2\2\u03d7"+ - "\u03d8\7)\2\2\u03d8\u00fa\3\2\2\2\u03d9\u03db\5\u010b\u0086\2\u03da\u03d9"+ - "\3\2\2\2\u03db\u03dc\3\2\2\2\u03dc\u03da\3\2\2\2\u03dc\u03dd\3\2\2\2\u03dd"+ - "\u00fc\3\2\2\2\u03de\u03e0\5\u010b\u0086\2\u03df\u03de\3\2\2\2\u03e0\u03e1"+ - "\3\2\2\2\u03e1\u03df\3\2\2\2\u03e1\u03e2\3\2\2\2\u03e2\u03e3\3\2\2\2\u03e3"+ - "\u03e7\5\u00f5{\2\u03e4\u03e6\5\u010b\u0086\2\u03e5\u03e4\3\2\2\2\u03e6"+ - "\u03e9\3\2\2\2\u03e7\u03e5\3\2\2\2\u03e7\u03e8\3\2\2\2\u03e8\u0409\3\2"+ - "\2\2\u03e9\u03e7\3\2\2\2\u03ea\u03ec\5\u00f5{\2\u03eb\u03ed\5\u010b\u0086"+ - "\2\u03ec\u03eb\3\2\2\2\u03ed\u03ee\3\2\2\2\u03ee\u03ec\3\2\2\2\u03ee\u03ef"+ - "\3\2\2\2\u03ef\u0409\3\2\2\2\u03f0\u03f2\5\u010b\u0086\2\u03f1\u03f0\3"+ - "\2\2\2\u03f2\u03f3\3\2\2\2\u03f3\u03f1\3\2\2\2\u03f3\u03f4\3\2\2\2\u03f4"+ - "\u03fc\3\2\2\2\u03f5\u03f9\5\u00f5{\2\u03f6\u03f8\5\u010b\u0086\2\u03f7"+ - "\u03f6\3\2\2\2\u03f8\u03fb\3\2\2\2\u03f9\u03f7\3\2\2\2\u03f9\u03fa\3\2"+ - "\2\2\u03fa\u03fd\3\2\2\2\u03fb\u03f9\3\2\2\2\u03fc\u03f5\3\2\2\2\u03fc"+ - "\u03fd\3\2\2\2\u03fd\u03fe\3\2\2\2\u03fe\u03ff\5\u0109\u0085\2\u03ff\u0409"+ - "\3\2\2\2\u0400\u0402\5\u00f5{\2\u0401\u0403\5\u010b\u0086\2\u0402\u0401"+ - "\3\2\2\2\u0403\u0404\3\2\2\2\u0404\u0402\3\2\2\2\u0404\u0405\3\2\2\2\u0405"+ - "\u0406\3\2\2\2\u0406\u0407\5\u0109\u0085\2\u0407\u0409\3\2\2\2\u0408\u03df"+ - "\3\2\2\2\u0408\u03ea\3\2\2\2\u0408\u03f1\3\2\2\2\u0408\u0400\3\2\2\2\u0409"+ - "\u00fe\3\2\2\2\u040a\u040d\5\u010d\u0087\2\u040b\u040d\7a\2\2\u040c\u040a"+ - "\3\2\2\2\u040c\u040b\3\2\2\2\u040d\u0413\3\2\2\2\u040e\u0412\5\u010d\u0087"+ - "\2\u040f\u0412\5\u010b\u0086\2\u0410\u0412\t\3\2\2\u0411\u040e\3\2\2\2"+ - "\u0411\u040f\3\2\2\2\u0411\u0410\3\2\2\2\u0412\u0415\3\2\2\2\u0413\u0411"+ - "\3\2\2\2\u0413\u0414\3\2\2\2\u0414\u0100\3\2\2\2\u0415\u0413\3\2\2\2\u0416"+ - "\u041a\5\u010b\u0086\2\u0417\u041b\5\u010d\u0087\2\u0418\u041b\5\u010b"+ - "\u0086\2\u0419\u041b\t\3\2\2\u041a\u0417\3\2\2\2\u041a\u0418\3\2\2\2\u041a"+ - "\u0419\3\2\2\2\u041b\u041c\3\2\2\2\u041c\u041a\3\2\2\2\u041c\u041d\3\2"+ - "\2\2\u041d\u0102\3\2\2\2\u041e\u0422\5\u010d\u0087\2\u041f\u0422\5\u010b"+ - "\u0086\2\u0420\u0422\7a\2\2\u0421\u041e\3\2\2\2\u0421\u041f\3\2\2\2\u0421"+ - "\u0420\3\2\2\2\u0422\u0423\3\2\2\2\u0423\u0421\3\2\2\2\u0423\u0424\3\2"+ - "\2\2\u0424\u0104\3\2\2\2\u0425\u042b\7$\2\2\u0426\u042a\n\4\2\2\u0427"+ - "\u0428\7$\2\2\u0428\u042a\7$\2\2\u0429\u0426\3\2\2\2\u0429\u0427\3\2\2"+ - "\2\u042a\u042d\3\2\2\2\u042b\u0429\3\2\2\2\u042b\u042c\3\2\2\2\u042c\u042e"+ - "\3\2\2\2\u042d\u042b\3\2\2\2\u042e\u042f\7$\2\2\u042f\u0106\3\2\2\2\u0430"+ - "\u0436\7b\2\2\u0431\u0435\n\5\2\2\u0432\u0433\7b\2\2\u0433\u0435\7b\2"+ - "\2\u0434\u0431\3\2\2\2\u0434\u0432\3\2\2\2\u0435\u0438\3\2\2\2\u0436\u0434"+ - "\3\2\2\2\u0436\u0437\3\2\2\2\u0437\u0439\3\2\2\2\u0438\u0436\3\2\2\2\u0439"+ - "\u043a\7b\2\2\u043a\u0108\3\2\2\2\u043b\u043d\7G\2\2\u043c\u043e\t\6\2"+ - "\2\u043d\u043c\3\2\2\2\u043d\u043e\3\2\2\2\u043e\u0440\3\2\2\2\u043f\u0441"+ - "\5\u010b\u0086\2\u0440\u043f\3\2\2\2\u0441\u0442\3\2\2\2\u0442\u0440\3"+ - "\2\2\2\u0442\u0443\3\2\2\2\u0443\u010a\3\2\2\2\u0444\u0445\t\7\2\2\u0445"+ - 
"\u010c\3\2\2\2\u0446\u0447\t\b\2\2\u0447\u010e\3\2\2\2\u0448\u0449\7/"+ - "\2\2\u0449\u044a\7/\2\2\u044a\u044e\3\2\2\2\u044b\u044d\n\t\2\2\u044c"+ - "\u044b\3\2\2\2\u044d\u0450\3\2\2\2\u044e\u044c\3\2\2\2\u044e\u044f\3\2"+ - "\2\2\u044f\u0452\3\2\2\2\u0450\u044e\3\2\2\2\u0451\u0453\7\17\2\2\u0452"+ - "\u0451\3\2\2\2\u0452\u0453\3\2\2\2\u0453\u0455\3\2\2\2\u0454\u0456\7\f"+ - "\2\2\u0455\u0454\3\2\2\2\u0455\u0456\3\2\2\2\u0456\u0457\3\2\2\2\u0457"+ - "\u0458\b\u0088\2\2\u0458\u0110\3\2\2\2\u0459\u045a\7\61\2\2\u045a\u045b"+ - "\7,\2\2\u045b\u0460\3\2\2\2\u045c\u045f\5\u0111\u0089\2\u045d\u045f\13"+ - "\2\2\2\u045e\u045c\3\2\2\2\u045e\u045d\3\2\2\2\u045f\u0462\3\2\2\2\u0460"+ - "\u0461\3\2\2\2\u0460\u045e\3\2\2\2\u0461\u0463\3\2\2\2\u0462\u0460\3\2"+ - "\2\2\u0463\u0464\7,\2\2\u0464\u0465\7\61\2\2\u0465\u0466\3\2\2\2\u0466"+ - "\u0467\b\u0089\2\2\u0467\u0112\3\2\2\2\u0468\u046a\t\n\2\2\u0469\u0468"+ - "\3\2\2\2\u046a\u046b\3\2\2\2\u046b\u0469\3\2\2\2\u046b\u046c\3\2\2\2\u046c"+ - "\u046d\3\2\2\2\u046d\u046e\b\u008a\2\2\u046e\u0114\3\2\2\2\u046f\u0470"+ - "\13\2\2\2\u0470\u0116\3\2\2\2\"\2\u03ae\u03d2\u03d4\u03dc\u03e1\u03e7"+ - "\u03ee\u03f3\u03f9\u03fc\u0404\u0408\u040c\u0411\u0413\u041a\u041c\u0421"+ - "\u0423\u0429\u042b\u0434\u0436\u043d\u0442\u044e\u0452\u0455\u045e\u0460"+ - "\u046b\3\2\3\2"; + "\t\u0089\4\u008a\t\u008a\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d"+ + "\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3"+ + "\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n"+ + "\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3"+ + "\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3"+ + "\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3"+ + "\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3"+ + "\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3"+ + "\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3"+ + "\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3"+ + "\27\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3"+ + "\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3"+ + "\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3"+ + "\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3"+ + " \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3"+ + "\"\3#\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3"+ + "&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3"+ + "*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3,\3,\3,\3-\3"+ + "-\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3/\3\60\3\60"+ + "\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\63\3\63"+ + "\3\63\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65"+ + "\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\3\67"+ + "\3\67\38\38\38\38\38\39\39\39\39\39\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3"+ + "<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3"+ + "?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3"+ + "B\3B\3B\3C\3C\3C\3C\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3G\3G\3"+ + "G\3G\3G\3G\3G\3G\3G\3G\3H\3H\3H\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3J\3J\3"+ + "K\3K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3M\3N\3"+ + "N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3P\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3Q\3Q\3R\3"+ + 
"R\3R\3R\3R\3R\3R\3R\3S\3S\3S\3S\3S\3S\3S\3T\3T\3T\3T\3T\3T\3T\3T\3U\3"+ + "U\3U\3U\3U\3U\3U\3V\3V\3V\3V\3V\3W\3W\3W\3W\3X\3X\3X\3X\3X\3X\3Y\3Y\3"+ + "Y\3Y\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3]\3]"+ + "\3]\3^\3^\3^\3^\3^\3_\3_\3_\3_\3_\3_\3`\3`\3`\3`\3`\3`\3a\3a\3a\3a\3a"+ + "\3a\3a\3b\3b\3b\3b\3b\3c\3c\3c\3c\3c\3c\3d\3d\3d\3d\3d\3e\3e\3e\3e\3e"+ + "\3f\3f\3f\3f\3f\3f\3g\3g\3g\3g\3g\3g\3g\3g\3h\3h\3h\3h\3i\3i\3i\3i\3i"+ + "\3i\3i\3j\3j\3j\3k\3k\3k\3l\3l\3l\3l\3m\3m\3m\3m\3m\3m\3n\3n\3o\3o\3p"+ + "\3p\3p\3p\3q\3q\3q\3q\5q\u03bd\nq\3r\3r\3s\3s\3s\3t\3t\3u\3u\3u\3v\3v"+ + "\3w\3w\3x\3x\3y\3y\3z\3z\3{\3{\3{\3|\3|\3|\3}\3}\3~\3~\3\177\3\177\3\177"+ + "\3\177\7\177\u03e1\n\177\f\177\16\177\u03e4\13\177\3\177\3\177\3\u0080"+ + "\6\u0080\u03e9\n\u0080\r\u0080\16\u0080\u03ea\3\u0081\6\u0081\u03ee\n"+ + "\u0081\r\u0081\16\u0081\u03ef\3\u0081\3\u0081\7\u0081\u03f4\n\u0081\f"+ + "\u0081\16\u0081\u03f7\13\u0081\3\u0081\3\u0081\6\u0081\u03fb\n\u0081\r"+ + "\u0081\16\u0081\u03fc\3\u0081\6\u0081\u0400\n\u0081\r\u0081\16\u0081\u0401"+ + "\3\u0081\3\u0081\7\u0081\u0406\n\u0081\f\u0081\16\u0081\u0409\13\u0081"+ + "\5\u0081\u040b\n\u0081\3\u0081\3\u0081\3\u0081\3\u0081\6\u0081\u0411\n"+ + "\u0081\r\u0081\16\u0081\u0412\3\u0081\3\u0081\5\u0081\u0417\n\u0081\3"+ + "\u0082\3\u0082\5\u0082\u041b\n\u0082\3\u0082\3\u0082\3\u0082\7\u0082\u0420"+ + "\n\u0082\f\u0082\16\u0082\u0423\13\u0082\3\u0083\3\u0083\3\u0083\3\u0083"+ + "\6\u0083\u0429\n\u0083\r\u0083\16\u0083\u042a\3\u0084\3\u0084\3\u0084"+ + "\6\u0084\u0430\n\u0084\r\u0084\16\u0084\u0431\3\u0085\3\u0085\3\u0085"+ + "\3\u0085\7\u0085\u0438\n\u0085\f\u0085\16\u0085\u043b\13\u0085\3\u0085"+ + "\3\u0085\3\u0086\3\u0086\3\u0086\3\u0086\7\u0086\u0443\n\u0086\f\u0086"+ + "\16\u0086\u0446\13\u0086\3\u0086\3\u0086\3\u0087\3\u0087\5\u0087\u044c"+ + "\n\u0087\3\u0087\6\u0087\u044f\n\u0087\r\u0087\16\u0087\u0450\3\u0088"+ + "\3\u0088\3\u0089\3\u0089\3\u008a\3\u008a\3\u008a\3\u008a\7\u008a\u045b"+ + "\n\u008a\f\u008a\16\u008a\u045e\13\u008a\3\u008a\5\u008a\u0461\n\u008a"+ + "\3\u008a\5\u008a\u0464\n\u008a\3\u008a\3\u008a\3\u008b\3\u008b\3\u008b"+ + "\3\u008b\3\u008b\7\u008b\u046d\n\u008b\f\u008b\16\u008b\u0470\13\u008b"+ + "\3\u008b\3\u008b\3\u008b\3\u008b\3\u008b\3\u008c\6\u008c\u0478\n\u008c"+ + "\r\u008c\16\u008c\u0479\3\u008c\3\u008c\3\u008d\3\u008d\3\u046e\2\u008e"+ + "\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20"+ + "\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37"+ + "= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o"+ + "9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH"+ + "\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1"+ + "R\u00a3S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5"+ + "\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9"+ + "f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5l\u00d7m\u00d9n\u00dbo\u00dd"+ + "p\u00dfq\u00e1r\u00e3s\u00e5t\u00e7u\u00e9v\u00ebw\u00edx\u00efy\u00f1"+ + "z\u00f3{\u00f5|\u00f7}\u00f9~\u00fb\177\u00fd\u0080\u00ff\u0081\u0101"+ + "\u0082\u0103\u0083\u0105\u0084\u0107\u0085\u0109\u0086\u010b\u0087\u010d"+ + "\2\u010f\2\u0111\2\u0113\u0088\u0115\u0089\u0117\u008a\u0119\u008b\3\2"+ + "\13\3\2))\4\2BBaa\3\2$$\3\2bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2"+ + "\13\f\17\17\"\"\u049f\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2"+ + "\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25"+ + 
"\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2"+ + "\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2"+ + "\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3"+ + "\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2"+ + "\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2"+ + "Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3"+ + "\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2"+ + "\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2"+ + "w\3\2\2\2\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2"+ + "\2\2\u0083\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b"+ + "\3\2\2\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2"+ + "\2\2\u0095\3\2\2\2\2\u0097\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d"+ + "\3\2\2\2\2\u009f\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2"+ + "\2\2\u00a7\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af"+ + "\3\2\2\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2"+ + "\2\2\u00b9\3\2\2\2\2\u00bb\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1"+ + "\3\2\2\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2"+ + "\2\2\u00cb\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2\2\2\u00d1\3\2\2\2\2\u00d3"+ + "\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2\2\2\u00db\3\2\2"+ + "\2\2\u00dd\3\2\2\2\2\u00df\3\2\2\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\2\u00e5"+ + "\3\2\2\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2\2\2\u00ed\3\2\2"+ + "\2\2\u00ef\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2\2\2\u00f5\3\2\2\2\2\u00f7"+ + "\3\2\2\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd\3\2\2\2\2\u00ff\3\2\2"+ + "\2\2\u0101\3\2\2\2\2\u0103\3\2\2\2\2\u0105\3\2\2\2\2\u0107\3\2\2\2\2\u0109"+ + "\3\2\2\2\2\u010b\3\2\2\2\2\u0113\3\2\2\2\2\u0115\3\2\2\2\2\u0117\3\2\2"+ + "\2\2\u0119\3\2\2\2\3\u011b\3\2\2\2\5\u011d\3\2\2\2\7\u011f\3\2\2\2\t\u0121"+ + "\3\2\2\2\13\u0123\3\2\2\2\r\u0127\3\2\2\2\17\u012f\3\2\2\2\21\u0138\3"+ + "\2\2\2\23\u013c\3\2\2\2\25\u0140\3\2\2\2\27\u0143\3\2\2\2\31\u0147\3\2"+ + "\2\2\33\u014f\3\2\2\2\35\u0152\3\2\2\2\37\u0157\3\2\2\2!\u015c\3\2\2\2"+ + "#\u0164\3\2\2\2%\u016d\3\2\2\2\'\u0175\3\2\2\2)\u017d\3\2\2\2+\u018a\3"+ + "\2\2\2-\u0197\3\2\2\2/\u01a9\3\2\2\2\61\u01ad\3\2\2\2\63\u01b2\3\2\2\2"+ + "\65\u01b8\3\2\2\2\67\u01bd\3\2\2\29\u01c6\3\2\2\2;\u01cf\3\2\2\2=\u01d4"+ + "\3\2\2\2?\u01d8\3\2\2\2A\u01df\3\2\2\2C\u01ea\3\2\2\2E\u01f1\3\2\2\2G"+ + "\u01f9\3\2\2\2I\u0201\3\2\2\2K\u0207\3\2\2\2M\u020d\3\2\2\2O\u0211\3\2"+ + "\2\2Q\u0218\3\2\2\2S\u021d\3\2\2\2U\u0224\3\2\2\2W\u0229\3\2\2\2Y\u0233"+ + "\3\2\2\2[\u023c\3\2\2\2]\u0242\3\2\2\2_\u0249\3\2\2\2a\u024e\3\2\2\2c"+ + "\u0254\3\2\2\2e\u0257\3\2\2\2g\u025f\3\2\2\2i\u0265\3\2\2\2k\u026e\3\2"+ + "\2\2m\u0271\3\2\2\2o\u0276\3\2\2\2q\u027b\3\2\2\2s\u0280\3\2\2\2u\u0285"+ + "\3\2\2\2w\u028b\3\2\2\2y\u0292\3\2\2\2{\u0298\3\2\2\2}\u029f\3\2\2\2\177"+ + "\u02a7\3\2\2\2\u0081\u02ad\3\2\2\2\u0083\u02b4\3\2\2\2\u0085\u02bc\3\2"+ + "\2\2\u0087\u02c0\3\2\2\2\u0089\u02c5\3\2\2\2\u008b\u02cb\3\2\2\2\u008d"+ + "\u02ce\3\2\2\2\u008f\u02d8\3\2\2\2\u0091\u02db\3\2\2\2\u0093\u02e1\3\2"+ + "\2\2\u0095\u02e7\3\2\2\2\u0097\u02ee\3\2\2\2\u0099\u02f7\3\2\2\2\u009b"+ + "\u02fd\3\2\2\2\u009d\u0302\3\2\2\2\u009f\u0308\3\2\2\2\u00a1\u030e\3\2"+ + "\2\2\u00a3\u0314\3\2\2\2\u00a5\u031c\3\2\2\2\u00a7\u0323\3\2\2\2\u00a9"+ + "\u032b\3\2\2\2\u00ab\u0332\3\2\2\2\u00ad\u0337\3\2\2\2\u00af\u033b\3\2"+ + 
"\2\2\u00b1\u0341\3\2\2\2\u00b3\u0348\3\2\2\2\u00b5\u034d\3\2\2\2\u00b7"+ + "\u0352\3\2\2\2\u00b9\u0357\3\2\2\2\u00bb\u035a\3\2\2\2\u00bd\u035f\3\2"+ + "\2\2\u00bf\u0365\3\2\2\2\u00c1\u036b\3\2\2\2\u00c3\u0372\3\2\2\2\u00c5"+ + "\u0377\3\2\2\2\u00c7\u037d\3\2\2\2\u00c9\u0382\3\2\2\2\u00cb\u0387\3\2"+ + "\2\2\u00cd\u038d\3\2\2\2\u00cf\u0395\3\2\2\2\u00d1\u0399\3\2\2\2\u00d3"+ + "\u03a0\3\2\2\2\u00d5\u03a3\3\2\2\2\u00d7\u03a6\3\2\2\2\u00d9\u03aa\3\2"+ + "\2\2\u00db\u03b0\3\2\2\2\u00dd\u03b2\3\2\2\2\u00df\u03b4\3\2\2\2\u00e1"+ + "\u03bc\3\2\2\2\u00e3\u03be\3\2\2\2\u00e5\u03c0\3\2\2\2\u00e7\u03c3\3\2"+ + "\2\2\u00e9\u03c5\3\2\2\2\u00eb\u03c8\3\2\2\2\u00ed\u03ca\3\2\2\2\u00ef"+ + "\u03cc\3\2\2\2\u00f1\u03ce\3\2\2\2\u00f3\u03d0\3\2\2\2\u00f5\u03d2\3\2"+ + "\2\2\u00f7\u03d5\3\2\2\2\u00f9\u03d8\3\2\2\2\u00fb\u03da\3\2\2\2\u00fd"+ + "\u03dc\3\2\2\2\u00ff\u03e8\3\2\2\2\u0101\u0416\3\2\2\2\u0103\u041a\3\2"+ + "\2\2\u0105\u0424\3\2\2\2\u0107\u042f\3\2\2\2\u0109\u0433\3\2\2\2\u010b"+ + "\u043e\3\2\2\2\u010d\u0449\3\2\2\2\u010f\u0452\3\2\2\2\u0111\u0454\3\2"+ + "\2\2\u0113\u0456\3\2\2\2\u0115\u0467\3\2\2\2\u0117\u0477\3\2\2\2\u0119"+ + "\u047d\3\2\2\2\u011b\u011c\7*\2\2\u011c\4\3\2\2\2\u011d\u011e\7+\2\2\u011e"+ + "\6\3\2\2\2\u011f\u0120\7.\2\2\u0120\b\3\2\2\2\u0121\u0122\7<\2\2\u0122"+ + "\n\3\2\2\2\u0123\u0124\7C\2\2\u0124\u0125\7N\2\2\u0125\u0126\7N\2\2\u0126"+ + "\f\3\2\2\2\u0127\u0128\7C\2\2\u0128\u0129\7P\2\2\u0129\u012a\7C\2\2\u012a"+ + "\u012b\7N\2\2\u012b\u012c\7[\2\2\u012c\u012d\7\\\2\2\u012d\u012e\7G\2"+ + "\2\u012e\16\3\2\2\2\u012f\u0130\7C\2\2\u0130\u0131\7P\2\2\u0131\u0132"+ + "\7C\2\2\u0132\u0133\7N\2\2\u0133\u0134\7[\2\2\u0134\u0135\7\\\2\2\u0135"+ + "\u0136\7G\2\2\u0136\u0137\7F\2\2\u0137\20\3\2\2\2\u0138\u0139\7C\2\2\u0139"+ + "\u013a\7P\2\2\u013a\u013b\7F\2\2\u013b\22\3\2\2\2\u013c\u013d\7C\2\2\u013d"+ + "\u013e\7P\2\2\u013e\u013f\7[\2\2\u013f\24\3\2\2\2\u0140\u0141\7C\2\2\u0141"+ + "\u0142\7U\2\2\u0142\26\3\2\2\2\u0143\u0144\7C\2\2\u0144\u0145\7U\2\2\u0145"+ + "\u0146\7E\2\2\u0146\30\3\2\2\2\u0147\u0148\7D\2\2\u0148\u0149\7G\2\2\u0149"+ + "\u014a\7V\2\2\u014a\u014b\7Y\2\2\u014b\u014c\7G\2\2\u014c\u014d\7G\2\2"+ + "\u014d\u014e\7P\2\2\u014e\32\3\2\2\2\u014f\u0150\7D\2\2\u0150\u0151\7"+ + "[\2\2\u0151\34\3\2\2\2\u0152\u0153\7E\2\2\u0153\u0154\7C\2\2\u0154\u0155"+ + "\7U\2\2\u0155\u0156\7G\2\2\u0156\36\3\2\2\2\u0157\u0158\7E\2\2\u0158\u0159"+ + "\7C\2\2\u0159\u015a\7U\2\2\u015a\u015b\7V\2\2\u015b \3\2\2\2\u015c\u015d"+ + "\7E\2\2\u015d\u015e\7C\2\2\u015e\u015f\7V\2\2\u015f\u0160\7C\2\2\u0160"+ + "\u0161\7N\2\2\u0161\u0162\7Q\2\2\u0162\u0163\7I\2\2\u0163\"\3\2\2\2\u0164"+ + "\u0165\7E\2\2\u0165\u0166\7C\2\2\u0166\u0167\7V\2\2\u0167\u0168\7C\2\2"+ + "\u0168\u0169\7N\2\2\u0169\u016a\7Q\2\2\u016a\u016b\7I\2\2\u016b\u016c"+ + "\7U\2\2\u016c$\3\2\2\2\u016d\u016e\7E\2\2\u016e\u016f\7Q\2\2\u016f\u0170"+ + "\7N\2\2\u0170\u0171\7W\2\2\u0171\u0172\7O\2\2\u0172\u0173\7P\2\2\u0173"+ + "\u0174\7U\2\2\u0174&\3\2\2\2\u0175\u0176\7E\2\2\u0176\u0177\7Q\2\2\u0177"+ + "\u0178\7P\2\2\u0178\u0179\7X\2\2\u0179\u017a\7G\2\2\u017a\u017b\7T\2\2"+ + "\u017b\u017c\7V\2\2\u017c(\3\2\2\2\u017d\u017e\7E\2\2\u017e\u017f\7W\2"+ + "\2\u017f\u0180\7T\2\2\u0180\u0181\7T\2\2\u0181\u0182\7G\2\2\u0182\u0183"+ + "\7P\2\2\u0183\u0184\7V\2\2\u0184\u0185\7a\2\2\u0185\u0186\7F\2\2\u0186"+ + "\u0187\7C\2\2\u0187\u0188\7V\2\2\u0188\u0189\7G\2\2\u0189*\3\2\2\2\u018a"+ + "\u018b\7E\2\2\u018b\u018c\7W\2\2\u018c\u018d\7T\2\2\u018d\u018e\7T\2\2"+ + "\u018e\u018f\7G\2\2\u018f\u0190\7P\2\2\u0190\u0191\7V\2\2\u0191\u0192"+ + 
"\7a\2\2\u0192\u0193\7V\2\2\u0193\u0194\7K\2\2\u0194\u0195\7O\2\2\u0195"+ + "\u0196\7G\2\2\u0196,\3\2\2\2\u0197\u0198\7E\2\2\u0198\u0199\7W\2\2\u0199"+ + "\u019a\7T\2\2\u019a\u019b\7T\2\2\u019b\u019c\7G\2\2\u019c\u019d\7P\2\2"+ + "\u019d\u019e\7V\2\2\u019e\u019f\7a\2\2\u019f\u01a0\7V\2\2\u01a0\u01a1"+ + "\7K\2\2\u01a1\u01a2\7O\2\2\u01a2\u01a3\7G\2\2\u01a3\u01a4\7U\2\2\u01a4"+ + "\u01a5\7V\2\2\u01a5\u01a6\7C\2\2\u01a6\u01a7\7O\2\2\u01a7\u01a8\7R\2\2"+ + "\u01a8.\3\2\2\2\u01a9\u01aa\7F\2\2\u01aa\u01ab\7C\2\2\u01ab\u01ac\7[\2"+ + "\2\u01ac\60\3\2\2\2\u01ad\u01ae\7F\2\2\u01ae\u01af\7C\2\2\u01af\u01b0"+ + "\7[\2\2\u01b0\u01b1\7U\2\2\u01b1\62\3\2\2\2\u01b2\u01b3\7F\2\2\u01b3\u01b4"+ + "\7G\2\2\u01b4\u01b5\7D\2\2\u01b5\u01b6\7W\2\2\u01b6\u01b7\7I\2\2\u01b7"+ + "\64\3\2\2\2\u01b8\u01b9\7F\2\2\u01b9\u01ba\7G\2\2\u01ba\u01bb\7U\2\2\u01bb"+ + "\u01bc\7E\2\2\u01bc\66\3\2\2\2\u01bd\u01be\7F\2\2\u01be\u01bf\7G\2\2\u01bf"+ + "\u01c0\7U\2\2\u01c0\u01c1\7E\2\2\u01c1\u01c2\7T\2\2\u01c2\u01c3\7K\2\2"+ + "\u01c3\u01c4\7D\2\2\u01c4\u01c5\7G\2\2\u01c58\3\2\2\2\u01c6\u01c7\7F\2"+ + "\2\u01c7\u01c8\7K\2\2\u01c8\u01c9\7U\2\2\u01c9\u01ca\7V\2\2\u01ca\u01cb"+ + "\7K\2\2\u01cb\u01cc\7P\2\2\u01cc\u01cd\7E\2\2\u01cd\u01ce\7V\2\2\u01ce"+ + ":\3\2\2\2\u01cf\u01d0\7G\2\2\u01d0\u01d1\7N\2\2\u01d1\u01d2\7U\2\2\u01d2"+ + "\u01d3\7G\2\2\u01d3<\3\2\2\2\u01d4\u01d5\7G\2\2\u01d5\u01d6\7P\2\2\u01d6"+ + "\u01d7\7F\2\2\u01d7>\3\2\2\2\u01d8\u01d9\7G\2\2\u01d9\u01da\7U\2\2\u01da"+ + "\u01db\7E\2\2\u01db\u01dc\7C\2\2\u01dc\u01dd\7R\2\2\u01dd\u01de\7G\2\2"+ + "\u01de@\3\2\2\2\u01df\u01e0\7G\2\2\u01e0\u01e1\7Z\2\2\u01e1\u01e2\7G\2"+ + "\2\u01e2\u01e3\7E\2\2\u01e3\u01e4\7W\2\2\u01e4\u01e5\7V\2\2\u01e5\u01e6"+ + "\7C\2\2\u01e6\u01e7\7D\2\2\u01e7\u01e8\7N\2\2\u01e8\u01e9\7G\2\2\u01e9"+ + "B\3\2\2\2\u01ea\u01eb\7G\2\2\u01eb\u01ec\7Z\2\2\u01ec\u01ed\7K\2\2\u01ed"+ + "\u01ee\7U\2\2\u01ee\u01ef\7V\2\2\u01ef\u01f0\7U\2\2\u01f0D\3\2\2\2\u01f1"+ + "\u01f2\7G\2\2\u01f2\u01f3\7Z\2\2\u01f3\u01f4\7R\2\2\u01f4\u01f5\7N\2\2"+ + "\u01f5\u01f6\7C\2\2\u01f6\u01f7\7K\2\2\u01f7\u01f8\7P\2\2\u01f8F\3\2\2"+ + "\2\u01f9\u01fa\7G\2\2\u01fa\u01fb\7Z\2\2\u01fb\u01fc\7V\2\2\u01fc\u01fd"+ + "\7T\2\2\u01fd\u01fe\7C\2\2\u01fe\u01ff\7E\2\2\u01ff\u0200\7V\2\2\u0200"+ + "H\3\2\2\2\u0201\u0202\7H\2\2\u0202\u0203\7C\2\2\u0203\u0204\7N\2\2\u0204"+ + "\u0205\7U\2\2\u0205\u0206\7G\2\2\u0206J\3\2\2\2\u0207\u0208\7H\2\2\u0208"+ + "\u0209\7K\2\2\u0209\u020a\7T\2\2\u020a\u020b\7U\2\2\u020b\u020c\7V\2\2"+ + "\u020cL\3\2\2\2\u020d\u020e\7H\2\2\u020e\u020f\7Q\2\2\u020f\u0210\7T\2"+ + "\2\u0210N\3\2\2\2\u0211\u0212\7H\2\2\u0212\u0213\7Q\2\2\u0213\u0214\7"+ + "T\2\2\u0214\u0215\7O\2\2\u0215\u0216\7C\2\2\u0216\u0217\7V\2\2\u0217P"+ + "\3\2\2\2\u0218\u0219\7H\2\2\u0219\u021a\7T\2\2\u021a\u021b\7Q\2\2\u021b"+ + "\u021c\7O\2\2\u021cR\3\2\2\2\u021d\u021e\7H\2\2\u021e\u021f\7T\2\2\u021f"+ + "\u0220\7Q\2\2\u0220\u0221\7\\\2\2\u0221\u0222\7G\2\2\u0222\u0223\7P\2"+ + "\2\u0223T\3\2\2\2\u0224\u0225\7H\2\2\u0225\u0226\7W\2\2\u0226\u0227\7"+ + "N\2\2\u0227\u0228\7N\2\2\u0228V\3\2\2\2\u0229\u022a\7H\2\2\u022a\u022b"+ + "\7W\2\2\u022b\u022c\7P\2\2\u022c\u022d\7E\2\2\u022d\u022e\7V\2\2\u022e"+ + "\u022f\7K\2\2\u022f\u0230\7Q\2\2\u0230\u0231\7P\2\2\u0231\u0232\7U\2\2"+ + "\u0232X\3\2\2\2\u0233\u0234\7I\2\2\u0234\u0235\7T\2\2\u0235\u0236\7C\2"+ + "\2\u0236\u0237\7R\2\2\u0237\u0238\7J\2\2\u0238\u0239\7X\2\2\u0239\u023a"+ + "\7K\2\2\u023a\u023b\7\\\2\2\u023bZ\3\2\2\2\u023c\u023d\7I\2\2\u023d\u023e"+ + "\7T\2\2\u023e\u023f\7Q\2\2\u023f\u0240\7W\2\2\u0240\u0241\7R\2\2\u0241"+ + 
"\\\3\2\2\2\u0242\u0243\7J\2\2\u0243\u0244\7C\2\2\u0244\u0245\7X\2\2\u0245"+ + "\u0246\7K\2\2\u0246\u0247\7P\2\2\u0247\u0248\7I\2\2\u0248^\3\2\2\2\u0249"+ + "\u024a\7J\2\2\u024a\u024b\7Q\2\2\u024b\u024c\7W\2\2\u024c\u024d\7T\2\2"+ + "\u024d`\3\2\2\2\u024e\u024f\7J\2\2\u024f\u0250\7Q\2\2\u0250\u0251\7W\2"+ + "\2\u0251\u0252\7T\2\2\u0252\u0253\7U\2\2\u0253b\3\2\2\2\u0254\u0255\7"+ + "K\2\2\u0255\u0256\7P\2\2\u0256d\3\2\2\2\u0257\u0258\7K\2\2\u0258\u0259"+ + "\7P\2\2\u0259\u025a\7E\2\2\u025a\u025b\7N\2\2\u025b\u025c\7W\2\2\u025c"+ + "\u025d\7F\2\2\u025d\u025e\7G\2\2\u025ef\3\2\2\2\u025f\u0260\7K\2\2\u0260"+ + "\u0261\7P\2\2\u0261\u0262\7P\2\2\u0262\u0263\7G\2\2\u0263\u0264\7T\2\2"+ + "\u0264h\3\2\2\2\u0265\u0266\7K\2\2\u0266\u0267\7P\2\2\u0267\u0268\7V\2"+ + "\2\u0268\u0269\7G\2\2\u0269\u026a\7T\2\2\u026a\u026b\7X\2\2\u026b\u026c"+ + "\7C\2\2\u026c\u026d\7N\2\2\u026dj\3\2\2\2\u026e\u026f\7K\2\2\u026f\u0270"+ + "\7U\2\2\u0270l\3\2\2\2\u0271\u0272\7L\2\2\u0272\u0273\7Q\2\2\u0273\u0274"+ + "\7K\2\2\u0274\u0275\7P\2\2\u0275n\3\2\2\2\u0276\u0277\7N\2\2\u0277\u0278"+ + "\7C\2\2\u0278\u0279\7U\2\2\u0279\u027a\7V\2\2\u027ap\3\2\2\2\u027b\u027c"+ + "\7N\2\2\u027c\u027d\7G\2\2\u027d\u027e\7H\2\2\u027e\u027f\7V\2\2\u027f"+ + "r\3\2\2\2\u0280\u0281\7N\2\2\u0281\u0282\7K\2\2\u0282\u0283\7M\2\2\u0283"+ + "\u0284\7G\2\2\u0284t\3\2\2\2\u0285\u0286\7N\2\2\u0286\u0287\7K\2\2\u0287"+ + "\u0288\7O\2\2\u0288\u0289\7K\2\2\u0289\u028a\7V\2\2\u028av\3\2\2\2\u028b"+ + "\u028c\7O\2\2\u028c\u028d\7C\2\2\u028d\u028e\7R\2\2\u028e\u028f\7R\2\2"+ + "\u028f\u0290\7G\2\2\u0290\u0291\7F\2\2\u0291x\3\2\2\2\u0292\u0293\7O\2"+ + "\2\u0293\u0294\7C\2\2\u0294\u0295\7V\2\2\u0295\u0296\7E\2\2\u0296\u0297"+ + "\7J\2\2\u0297z\3\2\2\2\u0298\u0299\7O\2\2\u0299\u029a\7K\2\2\u029a\u029b"+ + "\7P\2\2\u029b\u029c\7W\2\2\u029c\u029d\7V\2\2\u029d\u029e\7G\2\2\u029e"+ + "|\3\2\2\2\u029f\u02a0\7O\2\2\u02a0\u02a1\7K\2\2\u02a1\u02a2\7P\2\2\u02a2"+ + "\u02a3\7W\2\2\u02a3\u02a4\7V\2\2\u02a4\u02a5\7G\2\2\u02a5\u02a6\7U\2\2"+ + "\u02a6~\3\2\2\2\u02a7\u02a8\7O\2\2\u02a8\u02a9\7Q\2\2\u02a9\u02aa\7P\2"+ + "\2\u02aa\u02ab\7V\2\2\u02ab\u02ac\7J\2\2\u02ac\u0080\3\2\2\2\u02ad\u02ae"+ + "\7O\2\2\u02ae\u02af\7Q\2\2\u02af\u02b0\7P\2\2\u02b0\u02b1\7V\2\2\u02b1"+ + "\u02b2\7J\2\2\u02b2\u02b3\7U\2\2\u02b3\u0082\3\2\2\2\u02b4\u02b5\7P\2"+ + "\2\u02b5\u02b6\7C\2\2\u02b6\u02b7\7V\2\2\u02b7\u02b8\7W\2\2\u02b8\u02b9"+ + "\7T\2\2\u02b9\u02ba\7C\2\2\u02ba\u02bb\7N\2\2\u02bb\u0084\3\2\2\2\u02bc"+ + "\u02bd\7P\2\2\u02bd\u02be\7Q\2\2\u02be\u02bf\7V\2\2\u02bf\u0086\3\2\2"+ + "\2\u02c0\u02c1\7P\2\2\u02c1\u02c2\7W\2\2\u02c2\u02c3\7N\2\2\u02c3\u02c4"+ + "\7N\2\2\u02c4\u0088\3\2\2\2\u02c5\u02c6\7P\2\2\u02c6\u02c7\7W\2\2\u02c7"+ + "\u02c8\7N\2\2\u02c8\u02c9\7N\2\2\u02c9\u02ca\7U\2\2\u02ca\u008a\3\2\2"+ + "\2\u02cb\u02cc\7Q\2\2\u02cc\u02cd\7P\2\2\u02cd\u008c\3\2\2\2\u02ce\u02cf"+ + "\7Q\2\2\u02cf\u02d0\7R\2\2\u02d0\u02d1\7V\2\2\u02d1\u02d2\7K\2\2\u02d2"+ + "\u02d3\7O\2\2\u02d3\u02d4\7K\2\2\u02d4\u02d5\7\\\2\2\u02d5\u02d6\7G\2"+ + "\2\u02d6\u02d7\7F\2\2\u02d7\u008e\3\2\2\2\u02d8\u02d9\7Q\2\2\u02d9\u02da"+ + "\7T\2\2\u02da\u0090\3\2\2\2\u02db\u02dc\7Q\2\2\u02dc\u02dd\7T\2\2\u02dd"+ + "\u02de\7F\2\2\u02de\u02df\7G\2\2\u02df\u02e0\7T\2\2\u02e0\u0092\3\2\2"+ + "\2\u02e1\u02e2\7Q\2\2\u02e2\u02e3\7W\2\2\u02e3\u02e4\7V\2\2\u02e4\u02e5"+ + "\7G\2\2\u02e5\u02e6\7T\2\2\u02e6\u0094\3\2\2\2\u02e7\u02e8\7R\2\2\u02e8"+ + "\u02e9\7C\2\2\u02e9\u02ea\7T\2\2\u02ea\u02eb\7U\2\2\u02eb\u02ec\7G\2\2"+ + "\u02ec\u02ed\7F\2\2\u02ed\u0096\3\2\2\2\u02ee\u02ef\7R\2\2\u02ef\u02f0"+ + 
"\7J\2\2\u02f0\u02f1\7[\2\2\u02f1\u02f2\7U\2\2\u02f2\u02f3\7K\2\2\u02f3"+ + "\u02f4\7E\2\2\u02f4\u02f5\7C\2\2\u02f5\u02f6\7N\2\2\u02f6\u0098\3\2\2"+ + "\2\u02f7\u02f8\7R\2\2\u02f8\u02f9\7K\2\2\u02f9\u02fa\7X\2\2\u02fa\u02fb"+ + "\7Q\2\2\u02fb\u02fc\7V\2\2\u02fc\u009a\3\2\2\2\u02fd\u02fe\7R\2\2\u02fe"+ + "\u02ff\7N\2\2\u02ff\u0300\7C\2\2\u0300\u0301\7P\2\2\u0301\u009c\3\2\2"+ + "\2\u0302\u0303\7T\2\2\u0303\u0304\7K\2\2\u0304\u0305\7I\2\2\u0305\u0306"+ + "\7J\2\2\u0306\u0307\7V\2\2\u0307\u009e\3\2\2\2\u0308\u0309\7T\2\2\u0309"+ + "\u030a\7N\2\2\u030a\u030b\7K\2\2\u030b\u030c\7M\2\2\u030c\u030d\7G\2\2"+ + "\u030d\u00a0\3\2\2\2\u030e\u030f\7S\2\2\u030f\u0310\7W\2\2\u0310\u0311"+ + "\7G\2\2\u0311\u0312\7T\2\2\u0312\u0313\7[\2\2\u0313\u00a2\3\2\2\2\u0314"+ + "\u0315\7U\2\2\u0315\u0316\7E\2\2\u0316\u0317\7J\2\2\u0317\u0318\7G\2\2"+ + "\u0318\u0319\7O\2\2\u0319\u031a\7C\2\2\u031a\u031b\7U\2\2\u031b\u00a4"+ + "\3\2\2\2\u031c\u031d\7U\2\2\u031d\u031e\7G\2\2\u031e\u031f\7E\2\2\u031f"+ + "\u0320\7Q\2\2\u0320\u0321\7P\2\2\u0321\u0322\7F\2\2\u0322\u00a6\3\2\2"+ + "\2\u0323\u0324\7U\2\2\u0324\u0325\7G\2\2\u0325\u0326\7E\2\2\u0326\u0327"+ + "\7Q\2\2\u0327\u0328\7P\2\2\u0328\u0329\7F\2\2\u0329\u032a\7U\2\2\u032a"+ + "\u00a8\3\2\2\2\u032b\u032c\7U\2\2\u032c\u032d\7G\2\2\u032d\u032e\7N\2"+ + "\2\u032e\u032f\7G\2\2\u032f\u0330\7E\2\2\u0330\u0331\7V\2\2\u0331\u00aa"+ + "\3\2\2\2\u0332\u0333\7U\2\2\u0333\u0334\7J\2\2\u0334\u0335\7Q\2\2\u0335"+ + "\u0336\7Y\2\2\u0336\u00ac\3\2\2\2\u0337\u0338\7U\2\2\u0338\u0339\7[\2"+ + "\2\u0339\u033a\7U\2\2\u033a\u00ae\3\2\2\2\u033b\u033c\7V\2\2\u033c\u033d"+ + "\7C\2\2\u033d\u033e\7D\2\2\u033e\u033f\7N\2\2\u033f\u0340\7G\2\2\u0340"+ + "\u00b0\3\2\2\2\u0341\u0342\7V\2\2\u0342\u0343\7C\2\2\u0343\u0344\7D\2"+ + "\2\u0344\u0345\7N\2\2\u0345\u0346\7G\2\2\u0346\u0347\7U\2\2\u0347\u00b2"+ + "\3\2\2\2\u0348\u0349\7V\2\2\u0349\u034a\7G\2\2\u034a\u034b\7Z\2\2\u034b"+ + "\u034c\7V\2\2\u034c\u00b4\3\2\2\2\u034d\u034e\7V\2\2\u034e\u034f\7J\2"+ + "\2\u034f\u0350\7G\2\2\u0350\u0351\7P\2\2\u0351\u00b6\3\2\2\2\u0352\u0353"+ + "\7V\2\2\u0353\u0354\7T\2\2\u0354\u0355\7W\2\2\u0355\u0356\7G\2\2\u0356"+ + "\u00b8\3\2\2\2\u0357\u0358\7V\2\2\u0358\u0359\7Q\2\2\u0359\u00ba\3\2\2"+ + "\2\u035a\u035b\7V\2\2\u035b\u035c\7[\2\2\u035c\u035d\7R\2\2\u035d\u035e"+ + "\7G\2\2\u035e\u00bc\3\2\2\2\u035f\u0360\7V\2\2\u0360\u0361\7[\2\2\u0361"+ + "\u0362\7R\2\2\u0362\u0363\7G\2\2\u0363\u0364\7U\2\2\u0364\u00be\3\2\2"+ + "\2\u0365\u0366\7W\2\2\u0366\u0367\7U\2\2\u0367\u0368\7K\2\2\u0368\u0369"+ + "\7P\2\2\u0369\u036a\7I\2\2\u036a\u00c0\3\2\2\2\u036b\u036c\7X\2\2\u036c"+ + "\u036d\7G\2\2\u036d\u036e\7T\2\2\u036e\u036f\7K\2\2\u036f\u0370\7H\2\2"+ + "\u0370\u0371\7[\2\2\u0371\u00c2\3\2\2\2\u0372\u0373\7Y\2\2\u0373\u0374"+ + "\7J\2\2\u0374\u0375\7G\2\2\u0375\u0376\7P\2\2\u0376\u00c4\3\2\2\2\u0377"+ + "\u0378\7Y\2\2\u0378\u0379\7J\2\2\u0379\u037a\7G\2\2\u037a\u037b\7T\2\2"+ + "\u037b\u037c\7G\2\2\u037c\u00c6\3\2\2\2\u037d\u037e\7Y\2\2\u037e\u037f"+ + "\7K\2\2\u037f\u0380\7V\2\2\u0380\u0381\7J\2\2\u0381\u00c8\3\2\2\2\u0382"+ + "\u0383\7[\2\2\u0383\u0384\7G\2\2\u0384\u0385\7C\2\2\u0385\u0386\7T\2\2"+ + "\u0386\u00ca\3\2\2\2\u0387\u0388\7[\2\2\u0388\u0389\7G\2\2\u0389\u038a"+ + "\7C\2\2\u038a\u038b\7T\2\2\u038b\u038c\7U\2\2\u038c\u00cc\3\2\2\2\u038d"+ + "\u038e\7}\2\2\u038e\u038f\7G\2\2\u038f\u0390\7U\2\2\u0390\u0391\7E\2\2"+ + "\u0391\u0392\7C\2\2\u0392\u0393\7R\2\2\u0393\u0394\7G\2\2\u0394\u00ce"+ + "\3\2\2\2\u0395\u0396\7}\2\2\u0396\u0397\7H\2\2\u0397\u0398\7P\2\2\u0398"+ + 
"\u00d0\3\2\2\2\u0399\u039a\7}\2\2\u039a\u039b\7N\2\2\u039b\u039c\7K\2"+ + "\2\u039c\u039d\7O\2\2\u039d\u039e\7K\2\2\u039e\u039f\7V\2\2\u039f\u00d2"+ + "\3\2\2\2\u03a0\u03a1\7}\2\2\u03a1\u03a2\7F\2\2\u03a2\u00d4\3\2\2\2\u03a3"+ + "\u03a4\7}\2\2\u03a4\u03a5\7V\2\2\u03a5\u00d6\3\2\2\2\u03a6\u03a7\7}\2"+ + "\2\u03a7\u03a8\7V\2\2\u03a8\u03a9\7U\2\2\u03a9\u00d8\3\2\2\2\u03aa\u03ab"+ + "\7}\2\2\u03ab\u03ac\7I\2\2\u03ac\u03ad\7W\2\2\u03ad\u03ae\7K\2\2\u03ae"+ + "\u03af\7F\2\2\u03af\u00da\3\2\2\2\u03b0\u03b1\7\177\2\2\u03b1\u00dc\3"+ + "\2\2\2\u03b2\u03b3\7?\2\2\u03b3\u00de\3\2\2\2\u03b4\u03b5\7>\2\2\u03b5"+ + "\u03b6\7?\2\2\u03b6\u03b7\7@\2\2\u03b7\u00e0\3\2\2\2\u03b8\u03b9\7>\2"+ + "\2\u03b9\u03bd\7@\2\2\u03ba\u03bb\7#\2\2\u03bb\u03bd\7?\2\2\u03bc\u03b8"+ + "\3\2\2\2\u03bc\u03ba\3\2\2\2\u03bd\u00e2\3\2\2\2\u03be\u03bf\7>\2\2\u03bf"+ + "\u00e4\3\2\2\2\u03c0\u03c1\7>\2\2\u03c1\u03c2\7?\2\2\u03c2\u00e6\3\2\2"+ + "\2\u03c3\u03c4\7@\2\2\u03c4\u00e8\3\2\2\2\u03c5\u03c6\7@\2\2\u03c6\u03c7"+ + "\7?\2\2\u03c7\u00ea\3\2\2\2\u03c8\u03c9\7-\2\2\u03c9\u00ec\3\2\2\2\u03ca"+ + "\u03cb\7/\2\2\u03cb\u00ee\3\2\2\2\u03cc\u03cd\7,\2\2\u03cd\u00f0\3\2\2"+ + "\2\u03ce\u03cf\7\61\2\2\u03cf\u00f2\3\2\2\2\u03d0\u03d1\7\'\2\2\u03d1"+ + "\u00f4\3\2\2\2\u03d2\u03d3\7<\2\2\u03d3\u03d4\7<\2\2\u03d4\u00f6\3\2\2"+ + "\2\u03d5\u03d6\7~\2\2\u03d6\u03d7\7~\2\2\u03d7\u00f8\3\2\2\2\u03d8\u03d9"+ + "\7\60\2\2\u03d9\u00fa\3\2\2\2\u03da\u03db\7A\2\2\u03db\u00fc\3\2\2\2\u03dc"+ + "\u03e2\7)\2\2\u03dd\u03e1\n\2\2\2\u03de\u03df\7)\2\2\u03df\u03e1\7)\2"+ + "\2\u03e0\u03dd\3\2\2\2\u03e0\u03de\3\2\2\2\u03e1\u03e4\3\2\2\2\u03e2\u03e0"+ + "\3\2\2\2\u03e2\u03e3\3\2\2\2\u03e3\u03e5\3\2\2\2\u03e4\u03e2\3\2\2\2\u03e5"+ + "\u03e6\7)\2\2\u03e6\u00fe\3\2\2\2\u03e7\u03e9\5\u010f\u0088\2\u03e8\u03e7"+ + "\3\2\2\2\u03e9\u03ea\3\2\2\2\u03ea\u03e8\3\2\2\2\u03ea\u03eb\3\2\2\2\u03eb"+ + "\u0100\3\2\2\2\u03ec\u03ee\5\u010f\u0088\2\u03ed\u03ec\3\2\2\2\u03ee\u03ef"+ + "\3\2\2\2\u03ef\u03ed\3\2\2\2\u03ef\u03f0\3\2\2\2\u03f0\u03f1\3\2\2\2\u03f1"+ + "\u03f5\5\u00f9}\2\u03f2\u03f4\5\u010f\u0088\2\u03f3\u03f2\3\2\2\2\u03f4"+ + "\u03f7\3\2\2\2\u03f5\u03f3\3\2\2\2\u03f5\u03f6\3\2\2\2\u03f6\u0417\3\2"+ + "\2\2\u03f7\u03f5\3\2\2\2\u03f8\u03fa\5\u00f9}\2\u03f9\u03fb\5\u010f\u0088"+ + "\2\u03fa\u03f9\3\2\2\2\u03fb\u03fc\3\2\2\2\u03fc\u03fa\3\2\2\2\u03fc\u03fd"+ + "\3\2\2\2\u03fd\u0417\3\2\2\2\u03fe\u0400\5\u010f\u0088\2\u03ff\u03fe\3"+ + "\2\2\2\u0400\u0401\3\2\2\2\u0401\u03ff\3\2\2\2\u0401\u0402\3\2\2\2\u0402"+ + "\u040a\3\2\2\2\u0403\u0407\5\u00f9}\2\u0404\u0406\5\u010f\u0088\2\u0405"+ + "\u0404\3\2\2\2\u0406\u0409\3\2\2\2\u0407\u0405\3\2\2\2\u0407\u0408\3\2"+ + "\2\2\u0408\u040b\3\2\2\2\u0409\u0407\3\2\2\2\u040a\u0403\3\2\2\2\u040a"+ + "\u040b\3\2\2\2\u040b\u040c\3\2\2\2\u040c\u040d\5\u010d\u0087\2\u040d\u0417"+ + "\3\2\2\2\u040e\u0410\5\u00f9}\2\u040f\u0411\5\u010f\u0088\2\u0410\u040f"+ + "\3\2\2\2\u0411\u0412\3\2\2\2\u0412\u0410\3\2\2\2\u0412\u0413\3\2\2\2\u0413"+ + "\u0414\3\2\2\2\u0414\u0415\5\u010d\u0087\2\u0415\u0417\3\2\2\2\u0416\u03ed"+ + "\3\2\2\2\u0416\u03f8\3\2\2\2\u0416\u03ff\3\2\2\2\u0416\u040e\3\2\2\2\u0417"+ + "\u0102\3\2\2\2\u0418\u041b\5\u0111\u0089\2\u0419\u041b\7a\2\2\u041a\u0418"+ + "\3\2\2\2\u041a\u0419\3\2\2\2\u041b\u0421\3\2\2\2\u041c\u0420\5\u0111\u0089"+ + "\2\u041d\u0420\5\u010f\u0088\2\u041e\u0420\t\3\2\2\u041f\u041c\3\2\2\2"+ + "\u041f\u041d\3\2\2\2\u041f\u041e\3\2\2\2\u0420\u0423\3\2\2\2\u0421\u041f"+ + "\3\2\2\2\u0421\u0422\3\2\2\2\u0422\u0104\3\2\2\2\u0423\u0421\3\2\2\2\u0424"+ + 
"\u0428\5\u010f\u0088\2\u0425\u0429\5\u0111\u0089\2\u0426\u0429\5\u010f"+ + "\u0088\2\u0427\u0429\t\3\2\2\u0428\u0425\3\2\2\2\u0428\u0426\3\2\2\2\u0428"+ + "\u0427\3\2\2\2\u0429\u042a\3\2\2\2\u042a\u0428\3\2\2\2\u042a\u042b\3\2"+ + "\2\2\u042b\u0106\3\2\2\2\u042c\u0430\5\u0111\u0089\2\u042d\u0430\5\u010f"+ + "\u0088\2\u042e\u0430\7a\2\2\u042f\u042c\3\2\2\2\u042f\u042d\3\2\2\2\u042f"+ + "\u042e\3\2\2\2\u0430\u0431\3\2\2\2\u0431\u042f\3\2\2\2\u0431\u0432\3\2"+ + "\2\2\u0432\u0108\3\2\2\2\u0433\u0439\7$\2\2\u0434\u0438\n\4\2\2\u0435"+ + "\u0436\7$\2\2\u0436\u0438\7$\2\2\u0437\u0434\3\2\2\2\u0437\u0435\3\2\2"+ + "\2\u0438\u043b\3\2\2\2\u0439\u0437\3\2\2\2\u0439\u043a\3\2\2\2\u043a\u043c"+ + "\3\2\2\2\u043b\u0439\3\2\2\2\u043c\u043d\7$\2\2\u043d\u010a\3\2\2\2\u043e"+ + "\u0444\7b\2\2\u043f\u0443\n\5\2\2\u0440\u0441\7b\2\2\u0441\u0443\7b\2"+ + "\2\u0442\u043f\3\2\2\2\u0442\u0440\3\2\2\2\u0443\u0446\3\2\2\2\u0444\u0442"+ + "\3\2\2\2\u0444\u0445\3\2\2\2\u0445\u0447\3\2\2\2\u0446\u0444\3\2\2\2\u0447"+ + "\u0448\7b\2\2\u0448\u010c\3\2\2\2\u0449\u044b\7G\2\2\u044a\u044c\t\6\2"+ + "\2\u044b\u044a\3\2\2\2\u044b\u044c\3\2\2\2\u044c\u044e\3\2\2\2\u044d\u044f"+ + "\5\u010f\u0088\2\u044e\u044d\3\2\2\2\u044f\u0450\3\2\2\2\u0450\u044e\3"+ + "\2\2\2\u0450\u0451\3\2\2\2\u0451\u010e\3\2\2\2\u0452\u0453\t\7\2\2\u0453"+ + "\u0110\3\2\2\2\u0454\u0455\t\b\2\2\u0455\u0112\3\2\2\2\u0456\u0457\7/"+ + "\2\2\u0457\u0458\7/\2\2\u0458\u045c\3\2\2\2\u0459\u045b\n\t\2\2\u045a"+ + "\u0459\3\2\2\2\u045b\u045e\3\2\2\2\u045c\u045a\3\2\2\2\u045c\u045d\3\2"+ + "\2\2\u045d\u0460\3\2\2\2\u045e\u045c\3\2\2\2\u045f\u0461\7\17\2\2\u0460"+ + "\u045f\3\2\2\2\u0460\u0461\3\2\2\2\u0461\u0463\3\2\2\2\u0462\u0464\7\f"+ + "\2\2\u0463\u0462\3\2\2\2\u0463\u0464\3\2\2\2\u0464\u0465\3\2\2\2\u0465"+ + "\u0466\b\u008a\2\2\u0466\u0114\3\2\2\2\u0467\u0468\7\61\2\2\u0468\u0469"+ + "\7,\2\2\u0469\u046e\3\2\2\2\u046a\u046d\5\u0115\u008b\2\u046b\u046d\13"+ + "\2\2\2\u046c\u046a\3\2\2\2\u046c\u046b\3\2\2\2\u046d\u0470\3\2\2\2\u046e"+ + "\u046f\3\2\2\2\u046e\u046c\3\2\2\2\u046f\u0471\3\2\2\2\u0470\u046e\3\2"+ + "\2\2\u0471\u0472\7,\2\2\u0472\u0473\7\61\2\2\u0473\u0474\3\2\2\2\u0474"+ + "\u0475\b\u008b\2\2\u0475\u0116\3\2\2\2\u0476\u0478\t\n\2\2\u0477\u0476"+ + "\3\2\2\2\u0478\u0479\3\2\2\2\u0479\u0477\3\2\2\2\u0479\u047a\3\2\2\2\u047a"+ + "\u047b\3\2\2\2\u047b\u047c\b\u008c\2\2\u047c\u0118\3\2\2\2\u047d\u047e"+ + "\13\2\2\2\u047e\u011a\3\2\2\2\"\2\u03bc\u03e0\u03e2\u03ea\u03ef\u03f5"+ + "\u03fc\u0401\u0407\u040a\u0412\u0416\u041a\u041f\u0421\u0428\u042a\u042f"+ + "\u0431\u0437\u0439\u0442\u0444\u044b\u0450\u045c\u0460\u0463\u046c\u046e"+ + "\u0479\3\2\3\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java index c0845b7adb56..671368342e8f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java @@ -283,6 +283,16 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#selectItems}. 
+ * @param ctx the parse tree + */ + void enterSelectItems(SqlBaseParser.SelectItemsContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#selectItems}. + * @param ctx the parse tree + */ + void exitSelectItems(SqlBaseParser.SelectItemsContext ctx); /** * Enter a parse tree produced by the {@code selectExpression} * labeled alternative in {@link SqlBaseParser#selectItem}. @@ -371,6 +381,36 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#pivotClause}. + * @param ctx the parse tree + */ + void enterPivotClause(SqlBaseParser.PivotClauseContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#pivotClause}. + * @param ctx the parse tree + */ + void exitPivotClause(SqlBaseParser.PivotClauseContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#pivotArgs}. + * @param ctx the parse tree + */ + void enterPivotArgs(SqlBaseParser.PivotArgsContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#pivotArgs}. + * @param ctx the parse tree + */ + void exitPivotArgs(SqlBaseParser.PivotArgsContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#namedValueExpression}. + * @param ctx the parse tree + */ + void enterNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#namedValueExpression}. + * @param ctx the parse tree + */ + void exitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx); /** * Enter a parse tree produced by {@link SqlBaseParser#expression}. * @param ctx the parse tree diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 76e0f4654df6..63cc1bd7a3f5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -22,51 +22,54 @@ class SqlBaseParser extends Parser { COLUMNS=18, CONVERT=19, CURRENT_DATE=20, CURRENT_TIME=21, CURRENT_TIMESTAMP=22, DAY=23, DAYS=24, DEBUG=25, DESC=26, DESCRIBE=27, DISTINCT=28, ELSE=29, END=30, ESCAPE=31, EXECUTABLE=32, EXISTS=33, EXPLAIN=34, EXTRACT=35, FALSE=36, - FIRST=37, FORMAT=38, FROM=39, FROZEN=40, FULL=41, FUNCTIONS=42, GRAPHVIZ=43, - GROUP=44, HAVING=45, HOUR=46, HOURS=47, IN=48, INCLUDE=49, INNER=50, INTERVAL=51, - IS=52, JOIN=53, LAST=54, LEFT=55, LIKE=56, LIMIT=57, MAPPED=58, MATCH=59, - MINUTE=60, MINUTES=61, MONTH=62, MONTHS=63, NATURAL=64, NOT=65, NULL=66, - NULLS=67, ON=68, OPTIMIZED=69, OR=70, ORDER=71, OUTER=72, PARSED=73, PHYSICAL=74, - PLAN=75, RIGHT=76, RLIKE=77, QUERY=78, SCHEMAS=79, SECOND=80, SECONDS=81, - SELECT=82, SHOW=83, SYS=84, TABLE=85, TABLES=86, TEXT=87, THEN=88, TRUE=89, - TO=90, TYPE=91, TYPES=92, USING=93, VERIFY=94, WHEN=95, WHERE=96, WITH=97, - YEAR=98, YEARS=99, ESCAPE_ESC=100, FUNCTION_ESC=101, LIMIT_ESC=102, DATE_ESC=103, - TIME_ESC=104, TIMESTAMP_ESC=105, GUID_ESC=106, ESC_END=107, EQ=108, NULLEQ=109, - NEQ=110, LT=111, LTE=112, GT=113, GTE=114, PLUS=115, MINUS=116, ASTERISK=117, - SLASH=118, PERCENT=119, CAST_OP=120, CONCAT=121, DOT=122, PARAM=123, STRING=124, - INTEGER_VALUE=125, DECIMAL_VALUE=126, IDENTIFIER=127, DIGIT_IDENTIFIER=128, - TABLE_IDENTIFIER=129, QUOTED_IDENTIFIER=130, 
BACKQUOTED_IDENTIFIER=131, - SIMPLE_COMMENT=132, BRACKETED_COMMENT=133, WS=134, UNRECOGNIZED=135, DELIMITER=136; + FIRST=37, FOR=38, FORMAT=39, FROM=40, FROZEN=41, FULL=42, FUNCTIONS=43, + GRAPHVIZ=44, GROUP=45, HAVING=46, HOUR=47, HOURS=48, IN=49, INCLUDE=50, + INNER=51, INTERVAL=52, IS=53, JOIN=54, LAST=55, LEFT=56, LIKE=57, LIMIT=58, + MAPPED=59, MATCH=60, MINUTE=61, MINUTES=62, MONTH=63, MONTHS=64, NATURAL=65, + NOT=66, NULL=67, NULLS=68, ON=69, OPTIMIZED=70, OR=71, ORDER=72, OUTER=73, + PARSED=74, PHYSICAL=75, PIVOT=76, PLAN=77, RIGHT=78, RLIKE=79, QUERY=80, + SCHEMAS=81, SECOND=82, SECONDS=83, SELECT=84, SHOW=85, SYS=86, TABLE=87, + TABLES=88, TEXT=89, THEN=90, TRUE=91, TO=92, TYPE=93, TYPES=94, USING=95, + VERIFY=96, WHEN=97, WHERE=98, WITH=99, YEAR=100, YEARS=101, ESCAPE_ESC=102, + FUNCTION_ESC=103, LIMIT_ESC=104, DATE_ESC=105, TIME_ESC=106, TIMESTAMP_ESC=107, + GUID_ESC=108, ESC_END=109, EQ=110, NULLEQ=111, NEQ=112, LT=113, LTE=114, + GT=115, GTE=116, PLUS=117, MINUS=118, ASTERISK=119, SLASH=120, PERCENT=121, + CAST_OP=122, CONCAT=123, DOT=124, PARAM=125, STRING=126, INTEGER_VALUE=127, + DECIMAL_VALUE=128, IDENTIFIER=129, DIGIT_IDENTIFIER=130, TABLE_IDENTIFIER=131, + QUOTED_IDENTIFIER=132, BACKQUOTED_IDENTIFIER=133, SIMPLE_COMMENT=134, + BRACKETED_COMMENT=135, WS=136, UNRECOGNIZED=137, DELIMITER=138; public static final int RULE_singleStatement = 0, RULE_singleExpression = 1, RULE_statement = 2, RULE_query = 3, RULE_queryNoWith = 4, RULE_limitClause = 5, RULE_queryTerm = 6, RULE_orderBy = 7, RULE_querySpecification = 8, RULE_fromClause = 9, RULE_groupBy = 10, RULE_groupingElement = 11, RULE_groupingExpressions = 12, RULE_namedQuery = 13, - RULE_setQuantifier = 14, RULE_selectItem = 15, RULE_relation = 16, RULE_joinRelation = 17, - RULE_joinType = 18, RULE_joinCriteria = 19, RULE_relationPrimary = 20, - RULE_expression = 21, RULE_booleanExpression = 22, RULE_matchQueryOptions = 23, - RULE_predicated = 24, RULE_predicate = 25, RULE_likePattern = 26, RULE_pattern = 27, - RULE_patternEscape = 28, RULE_valueExpression = 29, RULE_primaryExpression = 30, - RULE_builtinDateTimeFunction = 31, RULE_castExpression = 32, RULE_castTemplate = 33, - RULE_convertTemplate = 34, RULE_extractExpression = 35, RULE_extractTemplate = 36, - RULE_functionExpression = 37, RULE_functionTemplate = 38, RULE_functionName = 39, - RULE_constant = 40, RULE_comparisonOperator = 41, RULE_booleanValue = 42, - RULE_interval = 43, RULE_intervalField = 44, RULE_dataType = 45, RULE_qualifiedName = 46, - RULE_identifier = 47, RULE_tableIdentifier = 48, RULE_quoteIdentifier = 49, - RULE_unquoteIdentifier = 50, RULE_number = 51, RULE_string = 52, RULE_whenClause = 53, - RULE_nonReserved = 54; + RULE_setQuantifier = 14, RULE_selectItems = 15, RULE_selectItem = 16, + RULE_relation = 17, RULE_joinRelation = 18, RULE_joinType = 19, RULE_joinCriteria = 20, + RULE_relationPrimary = 21, RULE_pivotClause = 22, RULE_pivotArgs = 23, + RULE_namedValueExpression = 24, RULE_expression = 25, RULE_booleanExpression = 26, + RULE_matchQueryOptions = 27, RULE_predicated = 28, RULE_predicate = 29, + RULE_likePattern = 30, RULE_pattern = 31, RULE_patternEscape = 32, RULE_valueExpression = 33, + RULE_primaryExpression = 34, RULE_builtinDateTimeFunction = 35, RULE_castExpression = 36, + RULE_castTemplate = 37, RULE_convertTemplate = 38, RULE_extractExpression = 39, + RULE_extractTemplate = 40, RULE_functionExpression = 41, RULE_functionTemplate = 42, + RULE_functionName = 43, RULE_constant = 44, RULE_comparisonOperator = 45, + 
RULE_booleanValue = 46, RULE_interval = 47, RULE_intervalField = 48, RULE_dataType = 49, + RULE_qualifiedName = 50, RULE_identifier = 51, RULE_tableIdentifier = 52, + RULE_quoteIdentifier = 53, RULE_unquoteIdentifier = 54, RULE_number = 55, + RULE_string = 56, RULE_whenClause = 57, RULE_nonReserved = 58; public static final String[] ruleNames = { "singleStatement", "singleExpression", "statement", "query", "queryNoWith", "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause", "groupBy", "groupingElement", "groupingExpressions", "namedQuery", "setQuantifier", - "selectItem", "relation", "joinRelation", "joinType", "joinCriteria", - "relationPrimary", "expression", "booleanExpression", "matchQueryOptions", - "predicated", "predicate", "likePattern", "pattern", "patternEscape", - "valueExpression", "primaryExpression", "builtinDateTimeFunction", "castExpression", - "castTemplate", "convertTemplate", "extractExpression", "extractTemplate", - "functionExpression", "functionTemplate", "functionName", "constant", - "comparisonOperator", "booleanValue", "interval", "intervalField", "dataType", - "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier", "unquoteIdentifier", + "selectItems", "selectItem", "relation", "joinRelation", "joinType", "joinCriteria", + "relationPrimary", "pivotClause", "pivotArgs", "namedValueExpression", + "expression", "booleanExpression", "matchQueryOptions", "predicated", + "predicate", "likePattern", "pattern", "patternEscape", "valueExpression", + "primaryExpression", "builtinDateTimeFunction", "castExpression", "castTemplate", + "convertTemplate", "extractExpression", "extractTemplate", "functionExpression", + "functionTemplate", "functionName", "constant", "comparisonOperator", + "booleanValue", "interval", "intervalField", "dataType", "qualifiedName", + "identifier", "tableIdentifier", "quoteIdentifier", "unquoteIdentifier", "number", "string", "whenClause", "nonReserved" }; @@ -76,40 +79,40 @@ class SqlBaseParser extends Parser { "'CATALOG'", "'CATALOGS'", "'COLUMNS'", "'CONVERT'", "'CURRENT_DATE'", "'CURRENT_TIME'", "'CURRENT_TIMESTAMP'", "'DAY'", "'DAYS'", "'DEBUG'", "'DESC'", "'DESCRIBE'", "'DISTINCT'", "'ELSE'", "'END'", "'ESCAPE'", "'EXECUTABLE'", - "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FORMAT'", + "'EXISTS'", "'EXPLAIN'", "'EXTRACT'", "'FALSE'", "'FIRST'", "'FOR'", "'FORMAT'", "'FROM'", "'FROZEN'", "'FULL'", "'FUNCTIONS'", "'GRAPHVIZ'", "'GROUP'", "'HAVING'", "'HOUR'", "'HOURS'", "'IN'", "'INCLUDE'", "'INNER'", "'INTERVAL'", "'IS'", "'JOIN'", "'LAST'", "'LEFT'", "'LIKE'", "'LIMIT'", "'MAPPED'", "'MATCH'", "'MINUTE'", "'MINUTES'", "'MONTH'", "'MONTHS'", "'NATURAL'", "'NOT'", "'NULL'", "'NULLS'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", - "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", - "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", - "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", "'TYPE'", "'TYPES'", - "'USING'", "'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", "'YEAR'", "'YEARS'", - "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", "'{TS'", "'{GUID'", - "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", - "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" + "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PIVOT'", "'PLAN'", "'RIGHT'", "'RLIKE'", + "'QUERY'", "'SCHEMAS'", "'SECOND'", "'SECONDS'", "'SELECT'", "'SHOW'", + "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'THEN'", "'TRUE'", "'TO'", + "'TYPE'", "'TYPES'", "'USING'", 
"'VERIFY'", "'WHEN'", "'WHERE'", "'WITH'", + "'YEAR'", "'YEARS'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", + "'{TS'", "'{GUID'", "'}'", "'='", "'<=>'", null, "'<'", "'<='", "'>'", + "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", "'::'", "'||'", "'.'", "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", "AS", "ASC", "BETWEEN", "BY", "CASE", "CAST", "CATALOG", "CATALOGS", "COLUMNS", "CONVERT", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP", "DAY", "DAYS", "DEBUG", "DESC", "DESCRIBE", "DISTINCT", "ELSE", "END", "ESCAPE", - "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FORMAT", - "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", "HAVING", - "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", "JOIN", "LAST", - "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", "MINUTES", "MONTH", - "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", "OPTIMIZED", "OR", - "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", - "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", - "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", "USING", "VERIFY", "WHEN", - "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", "FUNCTION_ESC", "LIMIT_ESC", - "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", "GUID_ESC", "ESC_END", "EQ", - "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "CAST_OP", "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", - "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", - "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", - "WS", "UNRECOGNIZED", "DELIMITER" + "EXECUTABLE", "EXISTS", "EXPLAIN", "EXTRACT", "FALSE", "FIRST", "FOR", + "FORMAT", "FROM", "FROZEN", "FULL", "FUNCTIONS", "GRAPHVIZ", "GROUP", + "HAVING", "HOUR", "HOURS", "IN", "INCLUDE", "INNER", "INTERVAL", "IS", + "JOIN", "LAST", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "MINUTE", + "MINUTES", "MONTH", "MONTHS", "NATURAL", "NOT", "NULL", "NULLS", "ON", + "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PIVOT", "PLAN", + "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SECOND", "SECONDS", "SELECT", "SHOW", + "SYS", "TABLE", "TABLES", "TEXT", "THEN", "TRUE", "TO", "TYPE", "TYPES", + "USING", "VERIFY", "WHEN", "WHERE", "WITH", "YEAR", "YEARS", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NULLEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CAST_OP", "CONCAT", + "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", + "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", + "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED", "DELIMITER" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -190,9 +193,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(110); + setState(118); statement(); - setState(111); + setState(119); match(EOF); } } @@ -237,9 +240,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(113); + setState(121); expression(); - setState(114); + setState(122); match(EOF); } } @@ -606,14 +609,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 4, RULE_statement); int _la; try { - setState(229); + setState(237); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: _localctx = new StatementDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(116); + setState(124); query(); } break; @@ -621,27 +624,27 @@ class SqlBaseParser extends Parser { _localctx = new ExplainContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(117); + setState(125); match(EXPLAIN); - setState(131); + setState(139); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: { - setState(118); + setState(126); match(T__0); - setState(127); + setState(135); _errHandler.sync(this); _la = _input.LA(1); - while (((((_la - 38)) & ~0x3f) == 0 && ((1L << (_la - 38)) & ((1L << (FORMAT - 38)) | (1L << (PLAN - 38)) | (1L << (VERIFY - 38)))) != 0)) { + while (((((_la - 39)) & ~0x3f) == 0 && ((1L << (_la - 39)) & ((1L << (FORMAT - 39)) | (1L << (PLAN - 39)) | (1L << (VERIFY - 39)))) != 0)) { { - setState(125); + setState(133); switch (_input.LA(1)) { case PLAN: { - setState(119); + setState(127); match(PLAN); - setState(120); + setState(128); ((ExplainContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ALL) | (1L << ANALYZED) | (1L << EXECUTABLE) | (1L << MAPPED))) != 0) || _la==OPTIMIZED || _la==PARSED) ) { @@ -653,9 +656,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(121); + setState(129); match(FORMAT); - setState(122); + setState(130); ((ExplainContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -667,9 +670,9 @@ class SqlBaseParser extends Parser { break; case VERIFY: { - setState(123); + setState(131); match(VERIFY); - setState(124); + setState(132); ((ExplainContext)_localctx).verify = booleanValue(); } break; @@ -677,16 +680,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(129); + setState(137); _errHandler.sync(this); _la = _input.LA(1); } - setState(130); + setState(138); match(T__1); } break; } - setState(133); + setState(141); statement(); } break; @@ -694,27 +697,27 @@ class SqlBaseParser extends Parser { _localctx = new DebugContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(134); + setState(142); match(DEBUG); - setState(146); + setState(154); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(135); + setState(143); match(T__0); - setState(142); + setState(150); _errHandler.sync(this); _la = _input.LA(1); while (_la==FORMAT || _la==PLAN) { { - setState(140); + setState(148); switch (_input.LA(1)) { case PLAN: { - setState(136); + setState(144); match(PLAN); - setState(137); + setState(145); ((DebugContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !(_la==ANALYZED || _la==OPTIMIZED) ) { @@ -726,9 +729,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(138); + setState(146); match(FORMAT); - setState(139); + setState(147); ((DebugContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -742,16 +745,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(144); + setState(152); _errHandler.sync(this); _la = _input.LA(1); } - setState(145); + setState(153); match(T__1); } break; } - setState(148); + setState(156); statement(); } break; @@ -759,26 +762,26 @@ class SqlBaseParser extends Parser { _localctx = new ShowTablesContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(149); + 
setState(157); match(SHOW); - setState(150); + setState(158); match(TABLES); - setState(153); + setState(161); _la = _input.LA(1); if (_la==INCLUDE) { { - setState(151); + setState(159); match(INCLUDE); - setState(152); + setState(160); match(FROZEN); } } - setState(157); + setState(165); switch (_input.LA(1)) { case LIKE: { - setState(155); + setState(163); ((ShowTablesContext)_localctx).tableLike = likePattern(); } break; @@ -808,6 +811,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -827,7 +831,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(156); + setState(164); ((ShowTablesContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -842,33 +846,33 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(159); + setState(167); match(SHOW); - setState(160); + setState(168); match(COLUMNS); - setState(163); + setState(171); _la = _input.LA(1); if (_la==INCLUDE) { { - setState(161); + setState(169); match(INCLUDE); - setState(162); + setState(170); match(FROZEN); } } - setState(165); + setState(173); _la = _input.LA(1); if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(168); + setState(176); switch (_input.LA(1)) { case LIKE: { - setState(166); + setState(174); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -898,6 +902,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -917,7 +922,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(167); + setState(175); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -930,29 +935,29 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(170); + setState(178); _la = _input.LA(1); if ( !(_la==DESC || _la==DESCRIBE) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(173); + setState(181); _la = _input.LA(1); if (_la==INCLUDE) { { - setState(171); + setState(179); match(INCLUDE); - setState(172); + setState(180); match(FROZEN); } } - setState(177); + setState(185); switch (_input.LA(1)) { case LIKE: { - setState(175); + setState(183); ((ShowColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -982,6 +987,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -1001,7 +1007,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(176); + setState(184); ((ShowColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -1014,15 +1020,15 @@ class SqlBaseParser extends Parser { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(179); + setState(187); match(SHOW); - setState(180); + setState(188); match(FUNCTIONS); - setState(182); + setState(190); _la = _input.LA(1); if (_la==LIKE) { { - setState(181); + setState(189); likePattern(); } } @@ -1033,9 +1039,9 @@ class SqlBaseParser extends Parser { _localctx = new ShowSchemasContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(184); + setState(192); match(SHOW); - setState(185); + setState(193); match(SCHEMAS); } break; @@ -1043,58 +1049,58 @@ class SqlBaseParser 
extends Parser { _localctx = new SysTablesContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(186); + setState(194); match(SYS); - setState(187); + setState(195); match(TABLES); - setState(190); + setState(198); _la = _input.LA(1); if (_la==CATALOG) { { - setState(188); + setState(196); match(CATALOG); - setState(189); + setState(197); ((SysTablesContext)_localctx).clusterLike = likePattern(); } } - setState(194); + setState(202); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: { - setState(192); + setState(200); ((SysTablesContext)_localctx).tableLike = likePattern(); } break; case 2: { - setState(193); + setState(201); ((SysTablesContext)_localctx).tableIdent = tableIdentifier(); } break; } - setState(205); + setState(213); _la = _input.LA(1); if (_la==TYPE) { { - setState(196); + setState(204); match(TYPE); - setState(197); + setState(205); string(); - setState(202); + setState(210); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(198); + setState(206); match(T__2); - setState(199); + setState(207); string(); } } - setState(204); + setState(212); _errHandler.sync(this); _la = _input.LA(1); } @@ -1107,28 +1113,28 @@ class SqlBaseParser extends Parser { _localctx = new SysColumnsContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(207); + setState(215); match(SYS); - setState(208); + setState(216); match(COLUMNS); - setState(211); + setState(219); _la = _input.LA(1); if (_la==CATALOG) { { - setState(209); + setState(217); match(CATALOG); - setState(210); + setState(218); ((SysColumnsContext)_localctx).cluster = string(); } } - setState(216); + setState(224); switch (_input.LA(1)) { case TABLE: { - setState(213); + setState(221); match(TABLE); - setState(214); + setState(222); ((SysColumnsContext)_localctx).tableLike = likePattern(); } break; @@ -1158,6 +1164,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -1177,7 +1184,7 @@ class SqlBaseParser extends Parser { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: { - setState(215); + setState(223); ((SysColumnsContext)_localctx).tableIdent = tableIdentifier(); } break; @@ -1187,11 +1194,11 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(219); + setState(227); _la = _input.LA(1); if (_la==LIKE) { { - setState(218); + setState(226); ((SysColumnsContext)_localctx).columnPattern = likePattern(); } } @@ -1202,19 +1209,19 @@ class SqlBaseParser extends Parser { _localctx = new SysTypesContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(221); + setState(229); match(SYS); - setState(222); + setState(230); match(TYPES); - setState(227); + setState(235); _la = _input.LA(1); - if (((((_la - 115)) & ~0x3f) == 0 && ((1L << (_la - 115)) & ((1L << (PLUS - 115)) | (1L << (MINUS - 115)) | (1L << (INTEGER_VALUE - 115)) | (1L << (DECIMAL_VALUE - 115)))) != 0)) { + if (((((_la - 117)) & ~0x3f) == 0 && ((1L << (_la - 117)) & ((1L << (PLUS - 117)) | (1L << (MINUS - 117)) | (1L << (INTEGER_VALUE - 117)) | (1L << (DECIMAL_VALUE - 117)))) != 0)) { { - setState(224); + setState(232); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(223); + setState(231); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -1224,7 +1231,7 @@ class SqlBaseParser extends Parser { } } - setState(226); + setState(234); ((SysTypesContext)_localctx).type = number(); } } @@ -1281,34 +1288,34 
@@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(240); + setState(248); _la = _input.LA(1); if (_la==WITH) { { - setState(231); + setState(239); match(WITH); - setState(232); + setState(240); namedQuery(); - setState(237); + setState(245); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(233); + setState(241); match(T__2); - setState(234); + setState(242); namedQuery(); } } - setState(239); + setState(247); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(242); + setState(250); queryNoWith(); } } @@ -1364,42 +1371,42 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(244); + setState(252); queryTerm(); - setState(255); + setState(263); _la = _input.LA(1); if (_la==ORDER) { { - setState(245); + setState(253); match(ORDER); - setState(246); + setState(254); match(BY); - setState(247); + setState(255); orderBy(); - setState(252); + setState(260); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(248); + setState(256); match(T__2); - setState(249); + setState(257); orderBy(); } } - setState(254); + setState(262); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(258); + setState(266); _la = _input.LA(1); if (_la==LIMIT || _la==LIMIT_ESC) { { - setState(257); + setState(265); limitClause(); } } @@ -1448,14 +1455,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 10, RULE_limitClause); int _la; try { - setState(265); + setState(273); switch (_input.LA(1)) { case LIMIT: enterOuterAlt(_localctx, 1); { - setState(260); + setState(268); match(LIMIT); - setState(261); + setState(269); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1468,9 +1475,9 @@ class SqlBaseParser extends Parser { case LIMIT_ESC: enterOuterAlt(_localctx, 2); { - setState(262); + setState(270); match(LIMIT_ESC); - setState(263); + setState(271); ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -1478,7 +1485,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(264); + setState(272); match(ESC_END); } break; @@ -1551,13 +1558,13 @@ class SqlBaseParser extends Parser { QueryTermContext _localctx = new QueryTermContext(_ctx, getState()); enterRule(_localctx, 12, RULE_queryTerm); try { - setState(272); + setState(280); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(267); + setState(275); querySpecification(); } break; @@ -1565,11 +1572,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(268); + setState(276); match(T__0); - setState(269); + setState(277); queryNoWith(); - setState(270); + setState(278); match(T__1); } break; @@ -1625,13 +1632,13 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(274); + setState(282); expression(); - setState(276); + setState(284); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { - setState(275); + setState(283); ((OrderByContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -1642,13 +1649,13 @@ class SqlBaseParser extends Parser { } } - setState(280); + setState(288); _la = _input.LA(1); if (_la==NULLS) { { - setState(278); + setState(286); match(NULLS); - setState(279); + setState(287); 
((OrderByContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -1676,11 +1683,8 @@ class SqlBaseParser extends Parser { public BooleanExpressionContext where; public BooleanExpressionContext having; public TerminalNode SELECT() { return getToken(SqlBaseParser.SELECT, 0); } - public List selectItem() { - return getRuleContexts(SelectItemContext.class); - } - public SelectItemContext selectItem(int i) { - return getRuleContext(SelectItemContext.class,i); + public SelectItemsContext selectItems() { + return getRuleContext(SelectItemsContext.class,0); } public SetQuantifierContext setQuantifier() { return getRuleContext(SetQuantifierContext.class,0); @@ -1727,75 +1731,59 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(282); + setState(290); match(SELECT); - setState(284); + setState(292); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(283); + setState(291); setQuantifier(); } } - setState(286); - selectItem(); - setState(291); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==T__2) { - { - { - setState(287); - match(T__2); - setState(288); - selectItem(); - } - } - setState(293); - _errHandler.sync(this); - _la = _input.LA(1); - } - setState(295); + setState(294); + selectItems(); + setState(296); _la = _input.LA(1); if (_la==FROM) { { - setState(294); + setState(295); fromClause(); } } - setState(299); + setState(300); _la = _input.LA(1); if (_la==WHERE) { { - setState(297); - match(WHERE); setState(298); + match(WHERE); + setState(299); ((QuerySpecificationContext)_localctx).where = booleanExpression(0); } } - setState(304); + setState(305); _la = _input.LA(1); if (_la==GROUP) { { - setState(301); - match(GROUP); setState(302); - match(BY); + match(GROUP); setState(303); + match(BY); + setState(304); groupBy(); } } - setState(308); + setState(309); _la = _input.LA(1); if (_la==HAVING) { { - setState(306); - match(HAVING); setState(307); + match(HAVING); + setState(308); ((QuerySpecificationContext)_localctx).having = booleanExpression(0); } } @@ -1821,6 +1809,9 @@ class SqlBaseParser extends Parser { public RelationContext relation(int i) { return getRuleContext(RelationContext.class,i); } + public PivotClauseContext pivotClause() { + return getRuleContext(PivotClauseContext.class,0); + } public FromClauseContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1847,26 +1838,35 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(310); - match(FROM); setState(311); + match(FROM); + setState(312); relation(); - setState(316); + setState(317); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(312); - match(T__2); setState(313); + match(T__2); + setState(314); relation(); } } - setState(318); + setState(319); _errHandler.sync(this); _la = _input.LA(1); } + setState(321); + _la = _input.LA(1); + if (_la==PIVOT) { + { + setState(320); + pivotClause(); + } + } + } } catch (RecognitionException re) { @@ -1916,30 +1916,30 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(320); + setState(324); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(319); + setState(323); setQuantifier(); } } - setState(322); + setState(326); groupingElement(); - setState(327); + setState(331); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(323); + setState(327); match(T__2); - setState(324); + setState(328); 
groupingElement(); } } - setState(329); + setState(333); _errHandler.sync(this); _la = _input.LA(1); } @@ -1994,7 +1994,7 @@ class SqlBaseParser extends Parser { _localctx = new SingleGroupingSetContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(330); + setState(334); groupingExpressions(); } } @@ -2040,47 +2040,47 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 24, RULE_groupingExpressions); int _la; try { - setState(345); + setState(349); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(332); + setState(336); match(T__0); - setState(341); + setState(345); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) | (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << (TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | (1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L 
<< (GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)) | (1L << (IDENTIFIER - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { - setState(333); + setState(337); expression(); - setState(338); + setState(342); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(334); + setState(338); match(T__2); - setState(335); + setState(339); expression(); } } - setState(340); + setState(344); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(343); + setState(347); match(T__1); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(344); + setState(348); expression(); } break; @@ -2131,15 +2131,15 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(347); - ((NamedQueryContext)_localctx).name = identifier(); - setState(348); - match(AS); - setState(349); - match(T__0); - setState(350); - queryNoWith(); setState(351); + ((NamedQueryContext)_localctx).name = identifier(); + setState(352); + match(AS); + setState(353); + match(T__0); + setState(354); + queryNoWith(); + setState(355); match(T__1); } } @@ -2183,7 +2183,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(353); + setState(357); _la = _input.LA(1); if ( !(_la==ALL || _la==DISTINCT) ) { _errHandler.recoverInline(this); @@ -2203,6 +2203,70 @@ class SqlBaseParser extends Parser { return _localctx; } + public static class SelectItemsContext extends ParserRuleContext { + public List selectItem() { + return getRuleContexts(SelectItemContext.class); + } + public SelectItemContext selectItem(int i) { + return getRuleContext(SelectItemContext.class,i); + } + public SelectItemsContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_selectItems; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterSelectItems(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitSelectItems(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitSelectItems(this); + else return visitor.visitChildren(this); + } + } + + public final SelectItemsContext selectItems() throws RecognitionException { + SelectItemsContext _localctx = new SelectItemsContext(_ctx, getState()); + enterRule(_localctx, 30, RULE_selectItems); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(359); + selectItem(); + setState(364); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==T__2) { + { + { + setState(360); + match(T__2); + setState(361); + selectItem(); + } + } + setState(366); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public static class SelectItemContext extends ParserRuleContext { public SelectItemContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2240,29 
+2304,29 @@ class SqlBaseParser extends Parser { public final SelectItemContext selectItem() throws RecognitionException { SelectItemContext _localctx = new SelectItemContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_selectItem); + enterRule(_localctx, 32, RULE_selectItem); int _la; try { _localctx = new SelectExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(355); + setState(367); expression(); - setState(360); + setState(372); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(357); + setState(369); _la = _input.LA(1); if (_la==AS) { { - setState(356); + setState(368); match(AS); } } - setState(359); + setState(371); identifier(); } break; @@ -2311,24 +2375,24 @@ class SqlBaseParser extends Parser { public final RelationContext relation() throws RecognitionException { RelationContext _localctx = new RelationContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_relation); + enterRule(_localctx, 34, RULE_relation); int _la; try { enterOuterAlt(_localctx, 1); { - setState(362); + setState(374); relationPrimary(); - setState(366); + setState(378); _errHandler.sync(this); _la = _input.LA(1); - while (((((_la - 41)) & ~0x3f) == 0 && ((1L << (_la - 41)) & ((1L << (FULL - 41)) | (1L << (INNER - 41)) | (1L << (JOIN - 41)) | (1L << (LEFT - 41)) | (1L << (NATURAL - 41)) | (1L << (RIGHT - 41)))) != 0)) { + while (((((_la - 42)) & ~0x3f) == 0 && ((1L << (_la - 42)) & ((1L << (FULL - 42)) | (1L << (INNER - 42)) | (1L << (JOIN - 42)) | (1L << (LEFT - 42)) | (1L << (NATURAL - 42)) | (1L << (RIGHT - 42)))) != 0)) { { { - setState(363); + setState(375); joinRelation(); } } - setState(368); + setState(380); _errHandler.sync(this); _la = _input.LA(1); } @@ -2379,10 +2443,10 @@ class SqlBaseParser extends Parser { public final JoinRelationContext joinRelation() throws RecognitionException { JoinRelationContext _localctx = new JoinRelationContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_joinRelation); + enterRule(_localctx, 36, RULE_joinRelation); int _la; try { - setState(380); + setState(392); switch (_input.LA(1)) { case FULL: case INNER: @@ -2392,18 +2456,18 @@ class SqlBaseParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(369); + setState(381); joinType(); } - setState(370); + setState(382); match(JOIN); - setState(371); + setState(383); ((JoinRelationContext)_localctx).right = relationPrimary(); - setState(373); + setState(385); _la = _input.LA(1); if (_la==ON || _la==USING) { { - setState(372); + setState(384); joinCriteria(); } } @@ -2413,13 +2477,13 @@ class SqlBaseParser extends Parser { case NATURAL: enterOuterAlt(_localctx, 2); { - setState(375); + setState(387); match(NATURAL); - setState(376); + setState(388); joinType(); - setState(377); + setState(389); match(JOIN); - setState(378); + setState(390); ((JoinRelationContext)_localctx).right = relationPrimary(); } break; @@ -2465,20 +2529,20 @@ class SqlBaseParser extends Parser { public final JoinTypeContext joinType() throws RecognitionException { JoinTypeContext _localctx = new JoinTypeContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_joinType); + enterRule(_localctx, 38, RULE_joinType); int _la; try { - setState(397); + setState(409); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { - setState(383); + setState(395); _la = _input.LA(1); if (_la==INNER) { { - setState(382); + setState(394); match(INNER); } } @@ -2488,13 
+2552,13 @@ class SqlBaseParser extends Parser { case LEFT: enterOuterAlt(_localctx, 2); { - setState(385); + setState(397); match(LEFT); - setState(387); + setState(399); _la = _input.LA(1); if (_la==OUTER) { { - setState(386); + setState(398); match(OUTER); } } @@ -2504,13 +2568,13 @@ class SqlBaseParser extends Parser { case RIGHT: enterOuterAlt(_localctx, 3); { - setState(389); + setState(401); match(RIGHT); - setState(391); + setState(403); _la = _input.LA(1); if (_la==OUTER) { { - setState(390); + setState(402); match(OUTER); } } @@ -2520,13 +2584,13 @@ class SqlBaseParser extends Parser { case FULL: enterOuterAlt(_localctx, 4); { - setState(393); + setState(405); match(FULL); - setState(395); + setState(407); _la = _input.LA(1); if (_la==OUTER) { { - setState(394); + setState(406); match(OUTER); } } @@ -2581,46 +2645,46 @@ class SqlBaseParser extends Parser { public final JoinCriteriaContext joinCriteria() throws RecognitionException { JoinCriteriaContext _localctx = new JoinCriteriaContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_joinCriteria); + enterRule(_localctx, 40, RULE_joinCriteria); int _la; try { - setState(413); + setState(425); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); { - setState(399); + setState(411); match(ON); - setState(400); + setState(412); booleanExpression(0); } break; case USING: enterOuterAlt(_localctx, 2); { - setState(401); + setState(413); match(USING); - setState(402); + setState(414); match(T__0); - setState(403); + setState(415); identifier(); - setState(408); + setState(420); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(404); + setState(416); match(T__2); - setState(405); + setState(417); identifier(); } } - setState(410); + setState(422); _errHandler.sync(this); _la = _input.LA(1); } - setState(411); + setState(423); match(T__1); } break; @@ -2723,42 +2787,42 @@ class SqlBaseParser extends Parser { public final RelationPrimaryContext relationPrimary() throws RecognitionException { RelationPrimaryContext _localctx = new RelationPrimaryContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_relationPrimary); + enterRule(_localctx, 42, RULE_relationPrimary); int _la; try { - setState(443); + setState(455); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,63,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { case 1: _localctx = new TableNameContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(416); + setState(428); _la = _input.LA(1); if (_la==FROZEN) { { - setState(415); + setState(427); match(FROZEN); } } - setState(418); + setState(430); tableIdentifier(); - setState(423); + setState(435); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: { - setState(420); + setState(432); _la = _input.LA(1); if (_la==AS) { { - setState(419); + setState(431); match(AS); } } - setState(422); + setState(434); qualifiedName(); } break; @@ -2769,27 +2833,27 @@ class SqlBaseParser extends Parser { _localctx = new AliasedQueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(425); + setState(437); match(T__0); - setState(426); + setState(438); queryNoWith(); - setState(427); + setState(439); match(T__1); - setState(432); + setState(444); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,61,_ctx) ) { case 1: { - setState(429); + setState(441); 
_la = _input.LA(1); if (_la==AS) { { - setState(428); + setState(440); match(AS); } } - setState(431); + setState(443); qualifiedName(); } break; @@ -2800,27 +2864,27 @@ class SqlBaseParser extends Parser { _localctx = new AliasedRelationContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(434); + setState(446); match(T__0); - setState(435); + setState(447); relation(); - setState(436); + setState(448); match(T__1); - setState(441); + setState(453); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,62,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,63,_ctx) ) { case 1: { - setState(438); + setState(450); _la = _input.LA(1); if (_la==AS) { { - setState(437); + setState(449); match(AS); } } - setState(440); + setState(452); qualifiedName(); } break; @@ -2840,6 +2904,211 @@ class SqlBaseParser extends Parser { return _localctx; } + public static class PivotClauseContext extends ParserRuleContext { + public PivotArgsContext aggs; + public QualifiedNameContext column; + public PivotArgsContext vals; + public TerminalNode PIVOT() { return getToken(SqlBaseParser.PIVOT, 0); } + public TerminalNode FOR() { return getToken(SqlBaseParser.FOR, 0); } + public TerminalNode IN() { return getToken(SqlBaseParser.IN, 0); } + public List pivotArgs() { + return getRuleContexts(PivotArgsContext.class); + } + public PivotArgsContext pivotArgs(int i) { + return getRuleContext(PivotArgsContext.class,i); + } + public QualifiedNameContext qualifiedName() { + return getRuleContext(QualifiedNameContext.class,0); + } + public PivotClauseContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_pivotClause; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterPivotClause(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitPivotClause(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitPivotClause(this); + else return visitor.visitChildren(this); + } + } + + public final PivotClauseContext pivotClause() throws RecognitionException { + PivotClauseContext _localctx = new PivotClauseContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_pivotClause); + try { + enterOuterAlt(_localctx, 1); + { + setState(457); + match(PIVOT); + setState(458); + match(T__0); + setState(459); + ((PivotClauseContext)_localctx).aggs = pivotArgs(); + setState(460); + match(FOR); + setState(461); + ((PivotClauseContext)_localctx).column = qualifiedName(); + setState(462); + match(IN); + setState(463); + match(T__0); + setState(464); + ((PivotClauseContext)_localctx).vals = pivotArgs(); + setState(465); + match(T__1); + setState(466); + match(T__1); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class PivotArgsContext extends ParserRuleContext { + public List namedValueExpression() { + return getRuleContexts(NamedValueExpressionContext.class); + } + public NamedValueExpressionContext namedValueExpression(int i) { + return getRuleContext(NamedValueExpressionContext.class,i); + } + public PivotArgsContext(ParserRuleContext parent, int invokingState) { + 
super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_pivotArgs; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterPivotArgs(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitPivotArgs(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitPivotArgs(this); + else return visitor.visitChildren(this); + } + } + + public final PivotArgsContext pivotArgs() throws RecognitionException { + PivotArgsContext _localctx = new PivotArgsContext(_ctx, getState()); + enterRule(_localctx, 46, RULE_pivotArgs); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(468); + namedValueExpression(); + setState(473); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==T__2) { + { + { + setState(469); + match(T__2); + setState(470); + namedValueExpression(); + } + } + setState(475); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class NamedValueExpressionContext extends ParserRuleContext { + public ValueExpressionContext valueExpression() { + return getRuleContext(ValueExpressionContext.class,0); + } + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); + } + public TerminalNode AS() { return getToken(SqlBaseParser.AS, 0); } + public NamedValueExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_namedValueExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterNamedValueExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitNamedValueExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitNamedValueExpression(this); + else return visitor.visitChildren(this); + } + } + + public final NamedValueExpressionContext namedValueExpression() throws RecognitionException { + NamedValueExpressionContext _localctx = new NamedValueExpressionContext(_ctx, getState()); + enterRule(_localctx, 48, RULE_namedValueExpression); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(476); + valueExpression(0); + setState(481); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 
70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)) | (1L << (BACKQUOTED_IDENTIFIER - 70)))) != 0)) { + { + setState(478); + _la = _input.LA(1); + if (_la==AS) { + { + setState(477); + match(AS); + } + } + + setState(480); + identifier(); + } + } + + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public static class ExpressionContext extends ParserRuleContext { public BooleanExpressionContext booleanExpression() { return getRuleContext(BooleanExpressionContext.class,0); @@ -2865,11 +3134,11 @@ class SqlBaseParser extends Parser { public final ExpressionContext expression() throws RecognitionException { ExpressionContext _localctx = new ExpressionContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_expression); + enterRule(_localctx, 50, RULE_expression); try { enterOuterAlt(_localctx, 1); { - setState(445); + setState(483); booleanExpression(0); } } @@ -3071,24 +3340,24 @@ class SqlBaseParser extends Parser { int _parentState = getState(); BooleanExpressionContext _localctx = new BooleanExpressionContext(_ctx, _parentState); BooleanExpressionContext _prevctx = _localctx; - int _startState = 44; - enterRecursionRule(_localctx, 44, RULE_booleanExpression, _p); + int _startState = 52; + enterRecursionRule(_localctx, 52, RULE_booleanExpression, _p); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(478); + setState(516); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,68,_ctx) ) { case 1: { _localctx = new LogicalNotContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(448); + setState(486); match(NOT); - setState(449); + setState(487); booleanExpression(8); } break; @@ -3097,13 +3366,13 @@ class SqlBaseParser extends Parser { _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(450); + setState(488); match(EXISTS); - setState(451); + setState(489); match(T__0); - setState(452); + setState(490); query(); - setState(453); + setState(491); match(T__1); } break; @@ -3112,15 +3381,15 @@ class SqlBaseParser extends Parser { _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(455); + setState(493); match(QUERY); - setState(456); + setState(494); match(T__0); - setState(457); + setState(495); ((StringQueryContext)_localctx).queryString = string(); - setState(458); + setState(496); matchQueryOptions(); - setState(459); + setState(497); match(T__1); } break; @@ -3129,19 +3398,19 @@ class SqlBaseParser extends Parser { _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(461); + setState(499); match(MATCH); - setState(462); + setState(500); match(T__0); - setState(463); + setState(501); ((MatchQueryContext)_localctx).singleField = qualifiedName(); - setState(464); + setState(502); match(T__2); - setState(465); + setState(503); ((MatchQueryContext)_localctx).queryString = string(); - setState(466); + setState(504); matchQueryOptions(); - setState(467); + setState(505); match(T__1); } break; @@ -3150,19 +3419,19 
@@ class SqlBaseParser extends Parser { _localctx = new MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(469); + setState(507); match(MATCH); - setState(470); + setState(508); match(T__0); - setState(471); + setState(509); ((MultiMatchQueryContext)_localctx).multiFields = string(); - setState(472); + setState(510); match(T__2); - setState(473); + setState(511); ((MultiMatchQueryContext)_localctx).queryString = string(); - setState(474); + setState(512); matchQueryOptions(); - setState(475); + setState(513); match(T__1); } break; @@ -3171,33 +3440,33 @@ class SqlBaseParser extends Parser { _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(477); + setState(515); predicated(); } break; } _ctx.stop = _input.LT(-1); - setState(488); + setState(526); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,66,_ctx); + _alt = getInterpreter().adaptivePredict(_input,70,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(486); + setState(524); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,65,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,69,_ctx) ) { case 1: { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(480); + setState(518); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(481); + setState(519); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(482); + setState(520); ((LogicalBinaryContext)_localctx).right = booleanExpression(3); } break; @@ -3206,20 +3475,20 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(483); + setState(521); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(484); + setState(522); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(485); + setState(523); ((LogicalBinaryContext)_localctx).right = booleanExpression(2); } break; } } } - setState(490); + setState(528); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,66,_ctx); + _alt = getInterpreter().adaptivePredict(_input,70,_ctx); } } } @@ -3262,24 +3531,24 @@ class SqlBaseParser extends Parser { public final MatchQueryOptionsContext matchQueryOptions() throws RecognitionException { MatchQueryOptionsContext _localctx = new MatchQueryOptionsContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_matchQueryOptions); + enterRule(_localctx, 54, RULE_matchQueryOptions); int _la; try { enterOuterAlt(_localctx, 1); { - setState(495); + setState(533); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(491); + setState(529); match(T__2); - setState(492); + setState(530); string(); } } - setState(497); + setState(535); _errHandler.sync(this); _la = _input.LA(1); } @@ -3324,18 +3593,18 @@ class SqlBaseParser extends Parser { public final PredicatedContext predicated() throws RecognitionException { PredicatedContext _localctx = new PredicatedContext(_ctx, getState()); - 
enterRule(_localctx, 48, RULE_predicated); + enterRule(_localctx, 56, RULE_predicated); try { enterOuterAlt(_localctx, 1); { - setState(498); + setState(536); valueExpression(0); - setState(500); + setState(538); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,68,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,72,_ctx) ) { case 1: { - setState(499); + setState(537); predicate(); } break; @@ -3402,145 +3671,145 @@ class SqlBaseParser extends Parser { public final PredicateContext predicate() throws RecognitionException { PredicateContext _localctx = new PredicateContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_predicate); + enterRule(_localctx, 58, RULE_predicate); int _la; try { - setState(548); + setState(586); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,80,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(503); + setState(541); _la = _input.LA(1); if (_la==NOT) { { - setState(502); + setState(540); match(NOT); } } - setState(505); + setState(543); ((PredicateContext)_localctx).kind = match(BETWEEN); - setState(506); + setState(544); ((PredicateContext)_localctx).lower = valueExpression(0); - setState(507); + setState(545); match(AND); - setState(508); + setState(546); ((PredicateContext)_localctx).upper = valueExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(511); + setState(549); _la = _input.LA(1); if (_la==NOT) { { - setState(510); + setState(548); match(NOT); } } - setState(513); + setState(551); ((PredicateContext)_localctx).kind = match(IN); - setState(514); + setState(552); match(T__0); - setState(515); + setState(553); valueExpression(0); - setState(520); + setState(558); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(516); + setState(554); match(T__2); - setState(517); + setState(555); valueExpression(0); } } - setState(522); + setState(560); _errHandler.sync(this); _la = _input.LA(1); } - setState(523); + setState(561); match(T__1); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(526); + setState(564); _la = _input.LA(1); if (_la==NOT) { { - setState(525); + setState(563); match(NOT); } } - setState(528); + setState(566); ((PredicateContext)_localctx).kind = match(IN); - setState(529); + setState(567); match(T__0); - setState(530); + setState(568); query(); - setState(531); + setState(569); match(T__1); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(534); + setState(572); _la = _input.LA(1); if (_la==NOT) { { - setState(533); + setState(571); match(NOT); } } - setState(536); + setState(574); ((PredicateContext)_localctx).kind = match(LIKE); - setState(537); + setState(575); pattern(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(539); + setState(577); _la = _input.LA(1); if (_la==NOT) { { - setState(538); + setState(576); match(NOT); } } - setState(541); + setState(579); ((PredicateContext)_localctx).kind = match(RLIKE); - setState(542); + setState(580); ((PredicateContext)_localctx).regex = string(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(543); + setState(581); match(IS); - setState(545); + setState(583); _la = _input.LA(1); if (_la==NOT) { { - setState(544); + setState(582); match(NOT); } } - setState(547); + setState(585); ((PredicateContext)_localctx).kind = match(NULL); } break; @@ -3583,13 +3852,13 @@ class SqlBaseParser extends Parser { public final LikePatternContext likePattern() throws 
RecognitionException { LikePatternContext _localctx = new LikePatternContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_likePattern); + enterRule(_localctx, 60, RULE_likePattern); try { enterOuterAlt(_localctx, 1); { - setState(550); + setState(588); match(LIKE); - setState(551); + setState(589); pattern(); } } @@ -3633,18 +3902,18 @@ class SqlBaseParser extends Parser { public final PatternContext pattern() throws RecognitionException { PatternContext _localctx = new PatternContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_pattern); + enterRule(_localctx, 62, RULE_pattern); try { enterOuterAlt(_localctx, 1); { - setState(553); + setState(591); ((PatternContext)_localctx).value = string(); - setState(555); + setState(593); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,77,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,81,_ctx) ) { case 1: { - setState(554); + setState(592); patternEscape(); } break; @@ -3690,27 +3959,27 @@ class SqlBaseParser extends Parser { public final PatternEscapeContext patternEscape() throws RecognitionException { PatternEscapeContext _localctx = new PatternEscapeContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_patternEscape); + enterRule(_localctx, 64, RULE_patternEscape); try { - setState(563); + setState(601); switch (_input.LA(1)) { case ESCAPE: enterOuterAlt(_localctx, 1); { - setState(557); + setState(595); match(ESCAPE); - setState(558); + setState(596); ((PatternEscapeContext)_localctx).escape = string(); } break; case ESCAPE_ESC: enterOuterAlt(_localctx, 2); { - setState(559); + setState(597); match(ESCAPE_ESC); - setState(560); + setState(598); ((PatternEscapeContext)_localctx).escape = string(); - setState(561); + setState(599); match(ESC_END); } break; @@ -3848,14 +4117,14 @@ class SqlBaseParser extends Parser { int _parentState = getState(); ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, _parentState); ValueExpressionContext _prevctx = _localctx; - int _startState = 58; - enterRecursionRule(_localctx, 58, RULE_valueExpression, _p); + int _startState = 66; + enterRecursionRule(_localctx, 66, RULE_valueExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(569); + setState(607); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -3891,6 +4160,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RIGHT: case RLIKE: @@ -3925,7 +4195,7 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(566); + setState(604); primaryExpression(0); } break; @@ -3935,7 +4205,7 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(567); + setState(605); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3943,7 +4213,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(568); + setState(606); valueExpression(4); } break; @@ -3951,33 +4221,33 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(583); + setState(621); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,81,_ctx); + _alt = getInterpreter().adaptivePredict(_input,85,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { 
- setState(581); + setState(619); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,80,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,84,_ctx) ) { case 1: { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(571); + setState(609); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(572); + setState(610); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 117)) & ~0x3f) == 0 && ((1L << (_la - 117)) & ((1L << (ASTERISK - 117)) | (1L << (SLASH - 117)) | (1L << (PERCENT - 117)))) != 0)) ) { + if ( !(((((_la - 119)) & ~0x3f) == 0 && ((1L << (_la - 119)) & ((1L << (ASTERISK - 119)) | (1L << (SLASH - 119)) | (1L << (PERCENT - 119)))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { consume(); } - setState(573); + setState(611); ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; @@ -3986,9 +4256,9 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(574); + setState(612); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(575); + setState(613); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3996,7 +4266,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(576); + setState(614); ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; @@ -4005,20 +4275,20 @@ class SqlBaseParser extends Parser { _localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState)); ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(577); + setState(615); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(578); + setState(616); comparisonOperator(); - setState(579); + setState(617); ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(585); + setState(623); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,81,_ctx); + _alt = getInterpreter().adaptivePredict(_input,85,_ctx); } } } @@ -4283,23 +4553,23 @@ class SqlBaseParser extends Parser { int _parentState = getState(); PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, _parentState); PrimaryExpressionContext _prevctx = _localctx; - int _startState = 60; - enterRecursionRule(_localctx, 60, RULE_primaryExpression, _p); + int _startState = 68; + enterRecursionRule(_localctx, 68, RULE_primaryExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(622); + setState(660); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,86,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,90,_ctx) ) { case 1: { _localctx = new CastContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(587); + setState(625); castExpression(); } break; @@ -4308,7 +4578,7 @@ class SqlBaseParser extends Parser { _localctx = new 
ExtractContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(588); + setState(626); extractExpression(); } break; @@ -4317,7 +4587,7 @@ class SqlBaseParser extends Parser { _localctx = new CurrentDateTimeFunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(589); + setState(627); builtinDateTimeFunction(); } break; @@ -4326,7 +4596,7 @@ class SqlBaseParser extends Parser { _localctx = new ConstantDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(590); + setState(628); constant(); } break; @@ -4335,18 +4605,18 @@ class SqlBaseParser extends Parser { _localctx = new StarContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(594); + setState(632); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)) | (1L << (IDENTIFIER - 69)) | (1L << (DIGIT_IDENTIFIER - 69)) | (1L << (QUOTED_IDENTIFIER - 69)) | (1L << (BACKQUOTED_IDENTIFIER - 69)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)) | (1L << (BACKQUOTED_IDENTIFIER - 70)))) != 0)) { { - setState(591); + setState(629); qualifiedName(); - setState(592); + setState(630); match(DOT); } } - setState(596); + setState(634); match(ASTERISK); } break; @@ -4355,7 +4625,7 @@ class SqlBaseParser extends Parser { _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(597); + setState(635); functionExpression(); } break; @@ -4364,11 +4634,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(598); + setState(636); match(T__0); - setState(599); + setState(637); query(); - setState(600); + setState(638); match(T__1); } break; @@ 
-4377,7 +4647,7 @@ class SqlBaseParser extends Parser { _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(602); + setState(640); qualifiedName(); } break; @@ -4386,11 +4656,11 @@ class SqlBaseParser extends Parser { _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(603); + setState(641); match(T__0); - setState(604); + setState(642); expression(); - setState(605); + setState(643); match(T__1); } break; @@ -4399,51 +4669,51 @@ class SqlBaseParser extends Parser { _localctx = new CaseContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(607); + setState(645); match(CASE); - setState(609); + setState(647); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) | (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << (TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | 
(1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L << (GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)) | (1L << (IDENTIFIER - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { - setState(608); + setState(646); ((CaseContext)_localctx).operand = booleanExpression(0); } } - setState(612); + setState(650); _errHandler.sync(this); _la = _input.LA(1); do { { { - setState(611); + setState(649); whenClause(); } } - setState(614); + setState(652); _errHandler.sync(this); _la = _input.LA(1); } while ( _la==WHEN ); - setState(618); + setState(656); _la = _input.LA(1); if (_la==ELSE) { { - setState(616); + setState(654); match(ELSE); - setState(617); + setState(655); ((CaseContext)_localctx).elseClause = booleanExpression(0); } } - setState(620); + setState(658); match(END); } break; } _ctx.stop = _input.LT(-1); - setState(629); + setState(667); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,87,_ctx); + _alt = getInterpreter().adaptivePredict(_input,91,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); @@ -4452,18 +4722,18 @@ class SqlBaseParser extends Parser { { _localctx = new CastOperatorExpressionContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(624); + setState(662); if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(625); + setState(663); match(CAST_OP); - setState(626); + setState(664); dataType(); } } } - setState(631); + setState(669); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,87,_ctx); + _alt = getInterpreter().adaptivePredict(_input,91,_ctx); } } } @@ -4504,28 +4774,28 @@ class SqlBaseParser extends Parser { public final BuiltinDateTimeFunctionContext builtinDateTimeFunction() throws RecognitionException { BuiltinDateTimeFunctionContext _localctx = new BuiltinDateTimeFunctionContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_builtinDateTimeFunction); + enterRule(_localctx, 70, RULE_builtinDateTimeFunction); try { - setState(635); + setState(673); switch (_input.LA(1)) { case CURRENT_TIMESTAMP: enterOuterAlt(_localctx, 1); { - setState(632); + setState(670); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIMESTAMP); } break; case CURRENT_DATE: enterOuterAlt(_localctx, 2); { - setState(633); + setState(671); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_DATE); } break; case CURRENT_TIME: enterOuterAlt(_localctx, 3); { - setState(634); + setState(672); ((BuiltinDateTimeFunctionContext)_localctx).name = match(CURRENT_TIME); } break; @@ -4574,44 +4844,44 @@ class SqlBaseParser extends Parser { public final CastExpressionContext castExpression() throws RecognitionException { CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_castExpression); + enterRule(_localctx, 72, RULE_castExpression); try { - setState(647); + setState(685); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,89,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,93,_ctx) ) { case 1: 
enterOuterAlt(_localctx, 1); { - setState(637); + setState(675); castTemplate(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(638); + setState(676); match(FUNCTION_ESC); - setState(639); + setState(677); castTemplate(); - setState(640); + setState(678); match(ESC_END); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(642); + setState(680); convertTemplate(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(643); + setState(681); match(FUNCTION_ESC); - setState(644); + setState(682); convertTemplate(); - setState(645); + setState(683); match(ESC_END); } break; @@ -4658,21 +4928,21 @@ class SqlBaseParser extends Parser { public final CastTemplateContext castTemplate() throws RecognitionException { CastTemplateContext _localctx = new CastTemplateContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_castTemplate); + enterRule(_localctx, 74, RULE_castTemplate); try { enterOuterAlt(_localctx, 1); { - setState(649); + setState(687); match(CAST); - setState(650); + setState(688); match(T__0); - setState(651); + setState(689); expression(); - setState(652); + setState(690); match(AS); - setState(653); + setState(691); dataType(); - setState(654); + setState(692); match(T__1); } } @@ -4716,21 +4986,21 @@ class SqlBaseParser extends Parser { public final ConvertTemplateContext convertTemplate() throws RecognitionException { ConvertTemplateContext _localctx = new ConvertTemplateContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_convertTemplate); + enterRule(_localctx, 76, RULE_convertTemplate); try { enterOuterAlt(_localctx, 1); { - setState(656); + setState(694); match(CONVERT); - setState(657); + setState(695); match(T__0); - setState(658); + setState(696); expression(); - setState(659); + setState(697); match(T__2); - setState(660); + setState(698); dataType(); - setState(661); + setState(699); match(T__1); } } @@ -4772,25 +5042,25 @@ class SqlBaseParser extends Parser { public final ExtractExpressionContext extractExpression() throws RecognitionException { ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_extractExpression); + enterRule(_localctx, 78, RULE_extractExpression); try { - setState(668); + setState(706); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(663); + setState(701); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(664); + setState(702); match(FUNCTION_ESC); - setState(665); + setState(703); extractTemplate(); - setState(666); + setState(704); match(ESC_END); } break; @@ -4840,21 +5110,21 @@ class SqlBaseParser extends Parser { public final ExtractTemplateContext extractTemplate() throws RecognitionException { ExtractTemplateContext _localctx = new ExtractTemplateContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_extractTemplate); + enterRule(_localctx, 80, RULE_extractTemplate); try { enterOuterAlt(_localctx, 1); { - setState(670); + setState(708); match(EXTRACT); - setState(671); + setState(709); match(T__0); - setState(672); + setState(710); ((ExtractTemplateContext)_localctx).field = identifier(); - setState(673); + setState(711); match(FROM); - setState(674); + setState(712); valueExpression(0); - setState(675); + setState(713); match(T__1); } } @@ -4895,9 +5165,9 @@ class SqlBaseParser extends Parser { public final FunctionExpressionContext functionExpression() throws RecognitionException { FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); - 
enterRule(_localctx, 74, RULE_functionExpression); + enterRule(_localctx, 82, RULE_functionExpression); try { - setState(682); + setState(720); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -4926,6 +5196,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RIGHT: case RLIKE: @@ -4946,18 +5217,18 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(677); + setState(715); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(678); + setState(716); match(FUNCTION_ESC); - setState(679); + setState(717); functionTemplate(); - setState(680); + setState(718); match(ESC_END); } break; @@ -5010,50 +5281,50 @@ class SqlBaseParser extends Parser { public final FunctionTemplateContext functionTemplate() throws RecognitionException { FunctionTemplateContext _localctx = new FunctionTemplateContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_functionTemplate); + enterRule(_localctx, 84, RULE_functionTemplate); int _la; try { enterOuterAlt(_localctx, 1); { - setState(684); + setState(722); functionName(); - setState(685); + setState(723); match(T__0); - setState(697); + setState(735); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (NOT - 65)) | (1L << (NULL - 65)) | (1L << (OPTIMIZED - 65)) | (1L << (PARSED - 65)) | (1L << (PHYSICAL - 65)) | (1L << (PLAN - 65)) | (1L << (RIGHT - 65)) | (1L << (RLIKE - 65)) | (1L << (QUERY - 65)) | (1L << (SCHEMAS - 65)) | (1L << (SECOND - 65)) | (1L << (SHOW - 65)) | (1L << (SYS - 65)) | (1L << (TABLES - 65)) | (1L << (TEXT - 65)) | (1L << (TRUE - 65)) | (1L << (TYPE - 65)) | (1L << (TYPES - 65)) | (1L << (VERIFY - 65)) | (1L << (YEAR - 65)) | (1L << (FUNCTION_ESC - 65)) | (1L << (DATE_ESC - 65)) | (1L << (TIME_ESC - 65)) | (1L << (TIMESTAMP_ESC - 65)) | (1L << (GUID_ESC - 65)) | (1L << (PLUS - 65)) | (1L << (MINUS - 65)) | (1L << (ASTERISK - 65)) | (1L << (PARAM - 65)) | (1L << (STRING - 65)) | (1L << (INTEGER_VALUE - 65)) | (1L << (DECIMAL_VALUE - 65)) | (1L << (IDENTIFIER - 65)) | (1L << (DIGIT_IDENTIFIER - 65)))) != 0) || _la==QUOTED_IDENTIFIER || _la==BACKQUOTED_IDENTIFIER) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CASE) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CONVERT) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LEFT) | (1L << LIMIT) | (1L << MAPPED) | (1L << MATCH) | (1L << MINUTE) | (1L << MONTH))) != 0) || 
((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (NOT - 66)) | (1L << (NULL - 66)) | (1L << (OPTIMIZED - 66)) | (1L << (PARSED - 66)) | (1L << (PHYSICAL - 66)) | (1L << (PIVOT - 66)) | (1L << (PLAN - 66)) | (1L << (RIGHT - 66)) | (1L << (RLIKE - 66)) | (1L << (QUERY - 66)) | (1L << (SCHEMAS - 66)) | (1L << (SECOND - 66)) | (1L << (SHOW - 66)) | (1L << (SYS - 66)) | (1L << (TABLES - 66)) | (1L << (TEXT - 66)) | (1L << (TRUE - 66)) | (1L << (TYPE - 66)) | (1L << (TYPES - 66)) | (1L << (VERIFY - 66)) | (1L << (YEAR - 66)) | (1L << (FUNCTION_ESC - 66)) | (1L << (DATE_ESC - 66)) | (1L << (TIME_ESC - 66)) | (1L << (TIMESTAMP_ESC - 66)) | (1L << (GUID_ESC - 66)) | (1L << (PLUS - 66)) | (1L << (MINUS - 66)) | (1L << (ASTERISK - 66)) | (1L << (PARAM - 66)) | (1L << (STRING - 66)) | (1L << (INTEGER_VALUE - 66)) | (1L << (DECIMAL_VALUE - 66)) | (1L << (IDENTIFIER - 66)))) != 0) || ((((_la - 130)) & ~0x3f) == 0 && ((1L << (_la - 130)) & ((1L << (DIGIT_IDENTIFIER - 130)) | (1L << (QUOTED_IDENTIFIER - 130)) | (1L << (BACKQUOTED_IDENTIFIER - 130)))) != 0)) { { - setState(687); + setState(725); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(686); + setState(724); setQuantifier(); } } - setState(689); + setState(727); expression(); - setState(694); + setState(732); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(690); + setState(728); match(T__2); - setState(691); + setState(729); expression(); } } - setState(696); + setState(734); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(699); + setState(737); match(T__1); } } @@ -5095,21 +5366,21 @@ class SqlBaseParser extends Parser { public final FunctionNameContext functionName() throws RecognitionException { FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_functionName); + enterRule(_localctx, 86, RULE_functionName); try { - setState(704); + setState(742); switch (_input.LA(1)) { case LEFT: enterOuterAlt(_localctx, 1); { - setState(701); + setState(739); match(LEFT); } break; case RIGHT: enterOuterAlt(_localctx, 2); { - setState(702); + setState(740); match(RIGHT); } break; @@ -5139,6 +5410,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -5158,7 +5430,7 @@ class SqlBaseParser extends Parser { case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 3); { - setState(703); + setState(741); identifier(); } break; @@ -5386,16 +5658,16 @@ class SqlBaseParser extends Parser { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_constant); + enterRule(_localctx, 88, RULE_constant); try { int _alt; - setState(732); + setState(770); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(706); + setState(744); match(NULL); } break; @@ -5403,7 +5675,7 @@ class SqlBaseParser extends Parser { _localctx = new IntervalLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(707); + setState(745); interval(); } break; @@ -5412,7 +5684,7 @@ class SqlBaseParser extends Parser { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(708); + setState(746); number(); } break; @@ -5421,7 +5693,7 @@ class SqlBaseParser extends Parser { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(709); + 
setState(747); booleanValue(); } break; @@ -5429,7 +5701,7 @@ class SqlBaseParser extends Parser { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(711); + setState(749); _errHandler.sync(this); _alt = 1; do { @@ -5437,7 +5709,7 @@ class SqlBaseParser extends Parser { case 1: { { - setState(710); + setState(748); match(STRING); } } @@ -5445,9 +5717,9 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(713); + setState(751); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,96,_ctx); + _alt = getInterpreter().adaptivePredict(_input,100,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -5455,7 +5727,7 @@ class SqlBaseParser extends Parser { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(715); + setState(753); match(PARAM); } break; @@ -5463,11 +5735,11 @@ class SqlBaseParser extends Parser { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(716); + setState(754); match(DATE_ESC); - setState(717); + setState(755); string(); - setState(718); + setState(756); match(ESC_END); } break; @@ -5475,11 +5747,11 @@ class SqlBaseParser extends Parser { _localctx = new TimeEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(720); + setState(758); match(TIME_ESC); - setState(721); + setState(759); string(); - setState(722); + setState(760); match(ESC_END); } break; @@ -5487,11 +5759,11 @@ class SqlBaseParser extends Parser { _localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(724); + setState(762); match(TIMESTAMP_ESC); - setState(725); + setState(763); string(); - setState(726); + setState(764); match(ESC_END); } break; @@ -5499,11 +5771,11 @@ class SqlBaseParser extends Parser { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(728); + setState(766); match(GUID_ESC); - setState(729); + setState(767); string(); - setState(730); + setState(768); match(ESC_END); } break; @@ -5551,14 +5823,14 @@ class SqlBaseParser extends Parser { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_comparisonOperator); + enterRule(_localctx, 90, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(734); + setState(772); _la = _input.LA(1); - if ( !(((((_la - 108)) & ~0x3f) == 0 && ((1L << (_la - 108)) & ((1L << (EQ - 108)) | (1L << (NULLEQ - 108)) | (1L << (NEQ - 108)) | (1L << (LT - 108)) | (1L << (LTE - 108)) | (1L << (GT - 108)) | (1L << (GTE - 108)))) != 0)) ) { + if ( !(((((_la - 110)) & ~0x3f) == 0 && ((1L << (_la - 110)) & ((1L << (EQ - 110)) | (1L << (NULLEQ - 110)) | (1L << (NEQ - 110)) | (1L << (LT - 110)) | (1L << (LTE - 110)) | (1L << (GT - 110)) | (1L << (GTE - 110)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5600,12 +5872,12 @@ class SqlBaseParser extends Parser { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_booleanValue); + enterRule(_localctx, 92, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(736); + setState(774); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) 
{ _errHandler.recoverInline(this); @@ -5668,18 +5940,18 @@ class SqlBaseParser extends Parser { public final IntervalContext interval() throws RecognitionException { IntervalContext _localctx = new IntervalContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_interval); + enterRule(_localctx, 94, RULE_interval); int _la; try { enterOuterAlt(_localctx, 1); { - setState(738); + setState(776); match(INTERVAL); - setState(740); + setState(778); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(739); + setState(777); ((IntervalContext)_localctx).sign = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -5690,35 +5962,35 @@ class SqlBaseParser extends Parser { } } - setState(744); + setState(782); switch (_input.LA(1)) { case INTEGER_VALUE: case DECIMAL_VALUE: { - setState(742); + setState(780); ((IntervalContext)_localctx).valueNumeric = number(); } break; case PARAM: case STRING: { - setState(743); + setState(781); ((IntervalContext)_localctx).valuePattern = string(); } break; default: throw new NoViableAltException(this); } - setState(746); + setState(784); ((IntervalContext)_localctx).leading = intervalField(); - setState(749); + setState(787); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,100,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,104,_ctx) ) { case 1: { - setState(747); + setState(785); match(TO); - setState(748); + setState(786); ((IntervalContext)_localctx).trailing = intervalField(); } break; @@ -5770,14 +6042,14 @@ class SqlBaseParser extends Parser { public final IntervalFieldContext intervalField() throws RecognitionException { IntervalFieldContext _localctx = new IntervalFieldContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_intervalField); + enterRule(_localctx, 96, RULE_intervalField); int _la; try { enterOuterAlt(_localctx, 1); { - setState(751); + setState(789); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH) | (1L << MONTHS))) != 0) || ((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (SECOND - 80)) | (1L << (SECONDS - 80)) | (1L << (YEAR - 80)) | (1L << (YEARS - 80)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << DAY) | (1L << DAYS) | (1L << HOUR) | (1L << HOURS) | (1L << MINUTE) | (1L << MINUTES) | (1L << MONTH))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (MONTHS - 64)) | (1L << (SECOND - 64)) | (1L << (SECONDS - 64)) | (1L << (YEAR - 64)) | (1L << (YEARS - 64)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -5828,12 +6100,12 @@ class SqlBaseParser extends Parser { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_dataType); + enterRule(_localctx, 98, RULE_dataType); try { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(753); + setState(791); identifier(); } } @@ -5880,30 +6152,30 @@ class SqlBaseParser extends Parser { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_qualifiedName); + enterRule(_localctx, 100, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(760); + setState(798); _errHandler.sync(this); - _alt = 
getInterpreter().adaptivePredict(_input,101,_ctx); + _alt = getInterpreter().adaptivePredict(_input,105,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(755); + setState(793); identifier(); - setState(756); + setState(794); match(DOT); } } } - setState(762); + setState(800); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,101,_ctx); + _alt = getInterpreter().adaptivePredict(_input,105,_ctx); } - setState(763); + setState(801); identifier(); } } @@ -5946,15 +6218,15 @@ class SqlBaseParser extends Parser { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_identifier); + enterRule(_localctx, 102, RULE_identifier); try { - setState(767); + setState(805); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(765); + setState(803); quoteIdentifier(); } break; @@ -5984,6 +6256,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -6001,7 +6274,7 @@ class SqlBaseParser extends Parser { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(766); + setState(804); unquoteIdentifier(); } break; @@ -6051,46 +6324,46 @@ class SqlBaseParser extends Parser { public final TableIdentifierContext tableIdentifier() throws RecognitionException { TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_tableIdentifier); + enterRule(_localctx, 104, RULE_tableIdentifier); int _la; try { - setState(781); + setState(819); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,105,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,109,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(772); + setState(810); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)) | (1L << (IDENTIFIER - 69)) | (1L << (DIGIT_IDENTIFIER - 69)) | (1L << (QUOTED_IDENTIFIER - 69)) | (1L << (BACKQUOTED_IDENTIFIER - 69)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && 
((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)) | (1L << (IDENTIFIER - 70)) | (1L << (DIGIT_IDENTIFIER - 70)) | (1L << (QUOTED_IDENTIFIER - 70)) | (1L << (BACKQUOTED_IDENTIFIER - 70)))) != 0)) { { - setState(769); + setState(807); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(770); + setState(808); match(T__3); } } - setState(774); + setState(812); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(778); + setState(816); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,104,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,108,_ctx) ) { case 1: { - setState(775); + setState(813); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(776); + setState(814); match(T__3); } break; } - setState(780); + setState(818); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -6155,15 +6428,15 @@ class SqlBaseParser extends Parser { public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException { QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_quoteIdentifier); + enterRule(_localctx, 106, RULE_quoteIdentifier); try { - setState(785); + setState(823); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(783); + setState(821); match(QUOTED_IDENTIFIER); } break; @@ -6171,7 +6444,7 @@ class SqlBaseParser extends Parser { _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(784); + setState(822); match(BACKQUOTED_IDENTIFIER); } break; @@ -6241,15 +6514,15 @@ class SqlBaseParser extends Parser { public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_unquoteIdentifier); + enterRule(_localctx, 108, RULE_unquoteIdentifier); try { - setState(790); + setState(828); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(787); + setState(825); match(IDENTIFIER); } break; @@ -6279,6 +6552,7 @@ class SqlBaseParser extends Parser { case OPTIMIZED: case PARSED: case PHYSICAL: + case PIVOT: case PLAN: case RLIKE: case QUERY: @@ -6295,7 +6569,7 @@ class SqlBaseParser extends Parser { _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(788); + setState(826); nonReserved(); } break; @@ -6303,7 +6577,7 @@ class SqlBaseParser extends Parser { _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(789); + setState(827); match(DIGIT_IDENTIFIER); } break; @@ -6370,15 +6644,15 @@ class SqlBaseParser extends Parser { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_number); + enterRule(_localctx, 110, RULE_number); try { - setState(794); + setState(832); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx 
= new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(792); + setState(830); match(DECIMAL_VALUE); } break; @@ -6386,7 +6660,7 @@ class SqlBaseParser extends Parser { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(793); + setState(831); match(INTEGER_VALUE); } break; @@ -6429,12 +6703,12 @@ class SqlBaseParser extends Parser { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_string); + enterRule(_localctx, 112, RULE_string); int _la; try { enterOuterAlt(_localctx, 1); { - setState(796); + setState(834); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -6486,17 +6760,17 @@ class SqlBaseParser extends Parser { public final WhenClauseContext whenClause() throws RecognitionException { WhenClauseContext _localctx = new WhenClauseContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_whenClause); + enterRule(_localctx, 114, RULE_whenClause); try { enterOuterAlt(_localctx, 1); { - setState(798); + setState(836); match(WHEN); - setState(799); + setState(837); ((WhenClauseContext)_localctx).condition = expression(); - setState(800); + setState(838); match(THEN); - setState(801); + setState(839); ((WhenClauseContext)_localctx).result = expression(); } } @@ -6538,6 +6812,7 @@ class SqlBaseParser extends Parser { public TerminalNode OPTIMIZED() { return getToken(SqlBaseParser.OPTIMIZED, 0); } public TerminalNode PARSED() { return getToken(SqlBaseParser.PARSED, 0); } public TerminalNode PHYSICAL() { return getToken(SqlBaseParser.PHYSICAL, 0); } + public TerminalNode PIVOT() { return getToken(SqlBaseParser.PIVOT, 0); } public TerminalNode PLAN() { return getToken(SqlBaseParser.PLAN, 0); } public TerminalNode QUERY() { return getToken(SqlBaseParser.QUERY, 0); } public TerminalNode RLIKE() { return getToken(SqlBaseParser.RLIKE, 0); } @@ -6572,14 +6847,14 @@ class SqlBaseParser extends Parser { public final NonReservedContext nonReserved() throws RecognitionException { NonReservedContext _localctx = new NonReservedContext(_ctx, getState()); - enterRule(_localctx, 108, RULE_nonReserved); + enterRule(_localctx, 116, RULE_nonReserved); int _la; try { enterOuterAlt(_localctx, 1); { - setState(803); + setState(841); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 69)) & ~0x3f) == 0 && ((1L << (_la - 69)) & ((1L << (OPTIMIZED - 69)) | (1L << (PARSED - 69)) | (1L << (PHYSICAL - 69)) | (1L << (PLAN - 69)) | (1L << (RLIKE - 69)) | (1L << (QUERY - 69)) | (1L << (SCHEMAS - 69)) | (1L << (SECOND - 69)) | (1L << (SHOW - 69)) | (1L << (SYS - 69)) | (1L << (TABLES - 69)) | (1L << (TEXT - 69)) | (1L << (TYPE - 69)) | (1L << (TYPES - 69)) | (1L << (VERIFY - 69)) | (1L << (YEAR - 69)))) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << CURRENT_DATE) | (1L << CURRENT_TIME) | (1L << CURRENT_TIMESTAMP) | (1L << DAY) | (1L << DEBUG) | (1L << EXECUTABLE) | 
(1L << EXPLAIN) | (1L << FIRST) | (1L << FORMAT) | (1L << FULL) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << HOUR) | (1L << INTERVAL) | (1L << LAST) | (1L << LIMIT) | (1L << MAPPED) | (1L << MINUTE) | (1L << MONTH))) != 0) || ((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OPTIMIZED - 70)) | (1L << (PARSED - 70)) | (1L << (PHYSICAL - 70)) | (1L << (PIVOT - 70)) | (1L << (PLAN - 70)) | (1L << (RLIKE - 70)) | (1L << (QUERY - 70)) | (1L << (SCHEMAS - 70)) | (1L << (SECOND - 70)) | (1L << (SHOW - 70)) | (1L << (SYS - 70)) | (1L << (TABLES - 70)) | (1L << (TEXT - 70)) | (1L << (TYPE - 70)) | (1L << (TYPES - 70)) | (1L << (VERIFY - 70)) | (1L << (YEAR - 70)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -6599,11 +6874,11 @@ class SqlBaseParser extends Parser { public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 22: + case 26: return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex); - case 29: + case 33: return valueExpression_sempred((ValueExpressionContext)_localctx, predIndex); - case 30: + case 34: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); } return true; @@ -6637,328 +6912,341 @@ class SqlBaseParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u008a\u0328\4\2\t"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\u008c\u034e\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t"+ - "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3"+ - "\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0080\n\4\f\4\16\4\u0083\13\4\3\4\5"+ - "\4\u0086\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u008f\n\4\f\4\16\4\u0092"+ - "\13\4\3\4\5\4\u0095\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u009c\n\4\3\4\3\4\5\4"+ - "\u00a0\n\4\3\4\3\4\3\4\3\4\5\4\u00a6\n\4\3\4\3\4\3\4\5\4\u00ab\n\4\3\4"+ - "\3\4\3\4\5\4\u00b0\n\4\3\4\3\4\5\4\u00b4\n\4\3\4\3\4\3\4\5\4\u00b9\n\4"+ - "\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00c1\n\4\3\4\3\4\5\4\u00c5\n\4\3\4\3\4\3"+ - "\4\3\4\7\4\u00cb\n\4\f\4\16\4\u00ce\13\4\5\4\u00d0\n\4\3\4\3\4\3\4\3\4"+ - "\5\4\u00d6\n\4\3\4\3\4\3\4\5\4\u00db\n\4\3\4\5\4\u00de\n\4\3\4\3\4\3\4"+ - "\5\4\u00e3\n\4\3\4\5\4\u00e6\n\4\5\4\u00e8\n\4\3\5\3\5\3\5\3\5\7\5\u00ee"+ - "\n\5\f\5\16\5\u00f1\13\5\5\5\u00f3\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6"+ - "\7\6\u00fd\n\6\f\6\16\6\u0100\13\6\5\6\u0102\n\6\3\6\5\6\u0105\n\6\3\7"+ - "\3\7\3\7\3\7\3\7\5\7\u010c\n\7\3\b\3\b\3\b\3\b\3\b\5\b\u0113\n\b\3\t\3"+ - "\t\5\t\u0117\n\t\3\t\3\t\5\t\u011b\n\t\3\n\3\n\5\n\u011f\n\n\3\n\3\n\3"+ - "\n\7\n\u0124\n\n\f\n\16\n\u0127\13\n\3\n\5\n\u012a\n\n\3\n\3\n\5\n\u012e"+ - "\n\n\3\n\3\n\3\n\5\n\u0133\n\n\3\n\3\n\5\n\u0137\n\n\3\13\3\13\3\13\3"+ - "\13\7\13\u013d\n\13\f\13\16\13\u0140\13\13\3\f\5\f\u0143\n\f\3\f\3\f\3"+ - "\f\7\f\u0148\n\f\f\f\16\f\u014b\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16"+ - "\u0153\n\16\f\16\16\16\u0156\13\16\5\16\u0158\n\16\3\16\3\16\5\16\u015c"+ - "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u0168\n\21"+ - "\3\21\5\21\u016b\n\21\3\22\3\22\7\22\u016f\n\22\f\22\16\22\u0172\13\22"+ - 
"\3\23\3\23\3\23\3\23\5\23\u0178\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u017f"+ - "\n\23\3\24\5\24\u0182\n\24\3\24\3\24\5\24\u0186\n\24\3\24\3\24\5\24\u018a"+ - "\n\24\3\24\3\24\5\24\u018e\n\24\5\24\u0190\n\24\3\25\3\25\3\25\3\25\3"+ - "\25\3\25\3\25\7\25\u0199\n\25\f\25\16\25\u019c\13\25\3\25\3\25\5\25\u01a0"+ - "\n\25\3\26\5\26\u01a3\n\26\3\26\3\26\5\26\u01a7\n\26\3\26\5\26\u01aa\n"+ - "\26\3\26\3\26\3\26\3\26\5\26\u01b0\n\26\3\26\5\26\u01b3\n\26\3\26\3\26"+ - "\3\26\3\26\5\26\u01b9\n\26\3\26\5\26\u01bc\n\26\5\26\u01be\n\26\3\27\3"+ - "\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ - "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ - "\30\3\30\3\30\3\30\5\30\u01e1\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30"+ - "\u01e9\n\30\f\30\16\30\u01ec\13\30\3\31\3\31\7\31\u01f0\n\31\f\31\16\31"+ - "\u01f3\13\31\3\32\3\32\5\32\u01f7\n\32\3\33\5\33\u01fa\n\33\3\33\3\33"+ - "\3\33\3\33\3\33\3\33\5\33\u0202\n\33\3\33\3\33\3\33\3\33\3\33\7\33\u0209"+ - "\n\33\f\33\16\33\u020c\13\33\3\33\3\33\3\33\5\33\u0211\n\33\3\33\3\33"+ - "\3\33\3\33\3\33\3\33\5\33\u0219\n\33\3\33\3\33\3\33\5\33\u021e\n\33\3"+ - "\33\3\33\3\33\3\33\5\33\u0224\n\33\3\33\5\33\u0227\n\33\3\34\3\34\3\34"+ - "\3\35\3\35\5\35\u022e\n\35\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u0236\n"+ - "\36\3\37\3\37\3\37\3\37\5\37\u023c\n\37\3\37\3\37\3\37\3\37\3\37\3\37"+ - "\3\37\3\37\3\37\3\37\7\37\u0248\n\37\f\37\16\37\u024b\13\37\3 \3 \3 \3"+ - " \3 \3 \3 \3 \5 \u0255\n \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \5 \u0264"+ - "\n \3 \6 \u0267\n \r \16 \u0268\3 \3 \5 \u026d\n \3 \3 \5 \u0271\n \3"+ - " \3 \3 \7 \u0276\n \f \16 \u0279\13 \3!\3!\3!\5!\u027e\n!\3\"\3\"\3\""+ - "\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5\"\u028a\n\"\3#\3#\3#\3#\3#\3#\3#\3$\3$"+ - "\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\5%\u029f\n%\3&\3&\3&\3&\3&\3&\3&\3\'\3"+ - "\'\3\'\3\'\3\'\5\'\u02ad\n\'\3(\3(\3(\5(\u02b2\n(\3(\3(\3(\7(\u02b7\n"+ - "(\f(\16(\u02ba\13(\5(\u02bc\n(\3(\3(\3)\3)\3)\5)\u02c3\n)\3*\3*\3*\3*"+ - "\3*\6*\u02ca\n*\r*\16*\u02cb\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3"+ - "*\3*\3*\3*\5*\u02df\n*\3+\3+\3,\3,\3-\3-\5-\u02e7\n-\3-\3-\5-\u02eb\n"+ - "-\3-\3-\3-\5-\u02f0\n-\3.\3.\3/\3/\3\60\3\60\3\60\7\60\u02f9\n\60\f\60"+ - "\16\60\u02fc\13\60\3\60\3\60\3\61\3\61\5\61\u0302\n\61\3\62\3\62\3\62"+ - "\5\62\u0307\n\62\3\62\3\62\3\62\3\62\5\62\u030d\n\62\3\62\5\62\u0310\n"+ - "\62\3\63\3\63\5\63\u0314\n\63\3\64\3\64\3\64\5\64\u0319\n\64\3\65\3\65"+ - "\5\65\u031d\n\65\3\66\3\66\3\67\3\67\3\67\3\67\3\67\38\38\38\2\5.<>9\2"+ - "\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJL"+ - "NPRTVXZ\\^`bdfhjln\2\22\b\2\7\7\t\t\"\"<ARSde\3\2}~\30\2\b\t\23\24"+ - "\26\31\33\33\"\"$$\'(+-\60\60\65\6588;<>>@@GGKMORUVXY]^``dd\u038b\2p\3"+ - "\2\2\2\4s\3\2\2\2\6\u00e7\3\2\2\2\b\u00f2\3\2\2\2\n\u00f6\3\2\2\2\f\u010b"+ - "\3\2\2\2\16\u0112\3\2\2\2\20\u0114\3\2\2\2\22\u011c\3\2\2\2\24\u0138\3"+ - "\2\2\2\26\u0142\3\2\2\2\30\u014c\3\2\2\2\32\u015b\3\2\2\2\34\u015d\3\2"+ - "\2\2\36\u0163\3\2\2\2 \u0165\3\2\2\2\"\u016c\3\2\2\2$\u017e\3\2\2\2&\u018f"+ - "\3\2\2\2(\u019f\3\2\2\2*\u01bd\3\2\2\2,\u01bf\3\2\2\2.\u01e0\3\2\2\2\60"+ - "\u01f1\3\2\2\2\62\u01f4\3\2\2\2\64\u0226\3\2\2\2\66\u0228\3\2\2\28\u022b"+ - "\3\2\2\2:\u0235\3\2\2\2<\u023b\3\2\2\2>\u0270\3\2\2\2@\u027d\3\2\2\2B"+ - "\u0289\3\2\2\2D\u028b\3\2\2\2F\u0292\3\2\2\2H\u029e\3\2\2\2J\u02a0\3\2"+ - "\2\2L\u02ac\3\2\2\2N\u02ae\3\2\2\2P\u02c2\3\2\2\2R\u02de\3\2\2\2T\u02e0"+ - "\3\2\2\2V\u02e2\3\2\2\2X\u02e4\3\2\2\2Z\u02f1\3\2\2\2\\\u02f3\3\2\2\2"+ - 
"^\u02fa\3\2\2\2`\u0301\3\2\2\2b\u030f\3\2\2\2d\u0313\3\2\2\2f\u0318\3"+ - "\2\2\2h\u031c\3\2\2\2j\u031e\3\2\2\2l\u0320\3\2\2\2n\u0325\3\2\2\2pq\5"+ - "\6\4\2qr\7\2\2\3r\3\3\2\2\2st\5,\27\2tu\7\2\2\3u\5\3\2\2\2v\u00e8\5\b"+ - "\5\2w\u0085\7$\2\2x\u0081\7\3\2\2yz\7M\2\2z\u0080\t\2\2\2{|\7(\2\2|\u0080"+ - "\t\3\2\2}~\7`\2\2~\u0080\5V,\2\177y\3\2\2\2\177{\3\2\2\2\177}\3\2\2\2"+ - "\u0080\u0083\3\2\2\2\u0081\177\3\2\2\2\u0081\u0082\3\2\2\2\u0082\u0084"+ - "\3\2\2\2\u0083\u0081\3\2\2\2\u0084\u0086\7\4\2\2\u0085x\3\2\2\2\u0085"+ - "\u0086\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u00e8\5\6\4\2\u0088\u0094\7\33"+ - "\2\2\u0089\u0090\7\3\2\2\u008a\u008b\7M\2\2\u008b\u008f\t\4\2\2\u008c"+ - "\u008d\7(\2\2\u008d\u008f\t\3\2\2\u008e\u008a\3\2\2\2\u008e\u008c\3\2"+ - "\2\2\u008f\u0092\3\2\2\2\u0090\u008e\3\2\2\2\u0090\u0091\3\2\2\2\u0091"+ - "\u0093\3\2\2\2\u0092\u0090\3\2\2\2\u0093\u0095\7\4\2\2\u0094\u0089\3\2"+ - "\2\2\u0094\u0095\3\2\2\2\u0095\u0096\3\2\2\2\u0096\u00e8\5\6\4\2\u0097"+ - "\u0098\7U\2\2\u0098\u009b\7X\2\2\u0099\u009a\7\63\2\2\u009a\u009c\7*\2"+ - "\2\u009b\u0099\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009f\3\2\2\2\u009d\u00a0"+ - "\5\66\34\2\u009e\u00a0\5b\62\2\u009f\u009d\3\2\2\2\u009f\u009e\3\2\2\2"+ - "\u009f\u00a0\3\2\2\2\u00a0\u00e8\3\2\2\2\u00a1\u00a2\7U\2\2\u00a2\u00a5"+ - "\7\24\2\2\u00a3\u00a4\7\63\2\2\u00a4\u00a6\7*\2\2\u00a5\u00a3\3\2\2\2"+ - "\u00a5\u00a6\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\u00aa\t\5\2\2\u00a8\u00ab"+ - "\5\66\34\2\u00a9\u00ab\5b\62\2\u00aa\u00a8\3\2\2\2\u00aa\u00a9\3\2\2\2"+ - "\u00ab\u00e8\3\2\2\2\u00ac\u00af\t\6\2\2\u00ad\u00ae\7\63\2\2\u00ae\u00b0"+ - "\7*\2\2\u00af\u00ad\3\2\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b3\3\2\2\2\u00b1"+ - "\u00b4\5\66\34\2\u00b2\u00b4\5b\62\2\u00b3\u00b1\3\2\2\2\u00b3\u00b2\3"+ - "\2\2\2\u00b4\u00e8\3\2\2\2\u00b5\u00b6\7U\2\2\u00b6\u00b8\7,\2\2\u00b7"+ - "\u00b9\5\66\34\2\u00b8\u00b7\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9\u00e8\3"+ - "\2\2\2\u00ba\u00bb\7U\2\2\u00bb\u00e8\7Q\2\2\u00bc\u00bd\7V\2\2\u00bd"+ - "\u00c0\7X\2\2\u00be\u00bf\7\22\2\2\u00bf\u00c1\5\66\34\2\u00c0\u00be\3"+ - "\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2\u00c5\5\66\34\2\u00c3"+ - "\u00c5\5b\62\2\u00c4\u00c2\3\2\2\2\u00c4\u00c3\3\2\2\2\u00c4\u00c5\3\2"+ - "\2\2\u00c5\u00cf\3\2\2\2\u00c6\u00c7\7]\2\2\u00c7\u00cc\5j\66\2\u00c8"+ - "\u00c9\7\5\2\2\u00c9\u00cb\5j\66\2\u00ca\u00c8\3\2\2\2\u00cb\u00ce\3\2"+ - "\2\2\u00cc\u00ca\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d0\3\2\2\2\u00ce"+ - "\u00cc\3\2\2\2\u00cf\u00c6\3\2\2\2\u00cf\u00d0\3\2\2\2\u00d0\u00e8\3\2"+ - "\2\2\u00d1\u00d2\7V\2\2\u00d2\u00d5\7\24\2\2\u00d3\u00d4\7\22\2\2\u00d4"+ - "\u00d6\5j\66\2\u00d5\u00d3\3\2\2\2\u00d5\u00d6\3\2\2\2\u00d6\u00da\3\2"+ - "\2\2\u00d7\u00d8\7W\2\2\u00d8\u00db\5\66\34\2\u00d9\u00db\5b\62\2\u00da"+ - "\u00d7\3\2\2\2\u00da\u00d9\3\2\2\2\u00da\u00db\3\2\2\2\u00db\u00dd\3\2"+ - "\2\2\u00dc\u00de\5\66\34\2\u00dd\u00dc\3\2\2\2\u00dd\u00de\3\2\2\2\u00de"+ - "\u00e8\3\2\2\2\u00df\u00e0\7V\2\2\u00e0\u00e5\7^\2\2\u00e1\u00e3\t\7\2"+ - "\2\u00e2\u00e1\3\2\2\2\u00e2\u00e3\3\2\2\2\u00e3\u00e4\3\2\2\2\u00e4\u00e6"+ - "\5h\65\2\u00e5\u00e2\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00e8\3\2\2\2\u00e7"+ - "v\3\2\2\2\u00e7w\3\2\2\2\u00e7\u0088\3\2\2\2\u00e7\u0097\3\2\2\2\u00e7"+ - "\u00a1\3\2\2\2\u00e7\u00ac\3\2\2\2\u00e7\u00b5\3\2\2\2\u00e7\u00ba\3\2"+ - "\2\2\u00e7\u00bc\3\2\2\2\u00e7\u00d1\3\2\2\2\u00e7\u00df\3\2\2\2\u00e8"+ - "\7\3\2\2\2\u00e9\u00ea\7c\2\2\u00ea\u00ef\5\34\17\2\u00eb\u00ec\7\5\2"+ - "\2\u00ec\u00ee\5\34\17\2\u00ed\u00eb\3\2\2\2\u00ee\u00f1\3\2\2\2\u00ef"+ - 
"\u00ed\3\2\2\2\u00ef\u00f0\3\2\2\2\u00f0\u00f3\3\2\2\2\u00f1\u00ef\3\2"+ - "\2\2\u00f2\u00e9\3\2\2\2\u00f2\u00f3\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4"+ - "\u00f5\5\n\6\2\u00f5\t\3\2\2\2\u00f6\u0101\5\16\b\2\u00f7\u00f8\7I\2\2"+ - "\u00f8\u00f9\7\17\2\2\u00f9\u00fe\5\20\t\2\u00fa\u00fb\7\5\2\2\u00fb\u00fd"+ - "\5\20\t\2\u00fc\u00fa\3\2\2\2\u00fd\u0100\3\2\2\2\u00fe\u00fc\3\2\2\2"+ - "\u00fe\u00ff\3\2\2\2\u00ff\u0102\3\2\2\2\u0100\u00fe\3\2\2\2\u0101\u00f7"+ - "\3\2\2\2\u0101\u0102\3\2\2\2\u0102\u0104\3\2\2\2\u0103\u0105\5\f\7\2\u0104"+ - "\u0103\3\2\2\2\u0104\u0105\3\2\2\2\u0105\13\3\2\2\2\u0106\u0107\7;\2\2"+ - "\u0107\u010c\t\b\2\2\u0108\u0109\7h\2\2\u0109\u010a\t\b\2\2\u010a\u010c"+ - "\7m\2\2\u010b\u0106\3\2\2\2\u010b\u0108\3\2\2\2\u010c\r\3\2\2\2\u010d"+ - "\u0113\5\22\n\2\u010e\u010f\7\3\2\2\u010f\u0110\5\n\6\2\u0110\u0111\7"+ - "\4\2\2\u0111\u0113\3\2\2\2\u0112\u010d\3\2\2\2\u0112\u010e\3\2\2\2\u0113"+ - "\17\3\2\2\2\u0114\u0116\5,\27\2\u0115\u0117\t\t\2\2\u0116\u0115\3\2\2"+ - "\2\u0116\u0117\3\2\2\2\u0117\u011a\3\2\2\2\u0118\u0119\7E\2\2\u0119\u011b"+ - "\t\n\2\2\u011a\u0118\3\2\2\2\u011a\u011b\3\2\2\2\u011b\21\3\2\2\2\u011c"+ - "\u011e\7T\2\2\u011d\u011f\5\36\20\2\u011e\u011d\3\2\2\2\u011e\u011f\3"+ - "\2\2\2\u011f\u0120\3\2\2\2\u0120\u0125\5 \21\2\u0121\u0122\7\5\2\2\u0122"+ - "\u0124\5 \21\2\u0123\u0121\3\2\2\2\u0124\u0127\3\2\2\2\u0125\u0123\3\2"+ - "\2\2\u0125\u0126\3\2\2\2\u0126\u0129\3\2\2\2\u0127\u0125\3\2\2\2\u0128"+ - "\u012a\5\24\13\2\u0129\u0128\3\2\2\2\u0129\u012a\3\2\2\2\u012a\u012d\3"+ - "\2\2\2\u012b\u012c\7b\2\2\u012c\u012e\5.\30\2\u012d\u012b\3\2\2\2\u012d"+ - "\u012e\3\2\2\2\u012e\u0132\3\2\2\2\u012f\u0130\7.\2\2\u0130\u0131\7\17"+ - "\2\2\u0131\u0133\5\26\f\2\u0132\u012f\3\2\2\2\u0132\u0133\3\2\2\2\u0133"+ - "\u0136\3\2\2\2\u0134\u0135\7/\2\2\u0135\u0137\5.\30\2\u0136\u0134\3\2"+ - "\2\2\u0136\u0137\3\2\2\2\u0137\23\3\2\2\2\u0138\u0139\7)\2\2\u0139\u013e"+ - "\5\"\22\2\u013a\u013b\7\5\2\2\u013b\u013d\5\"\22\2\u013c\u013a\3\2\2\2"+ - "\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013e\u013f\3\2\2\2\u013f\25"+ - "\3\2\2\2\u0140\u013e\3\2\2\2\u0141\u0143\5\36\20\2\u0142\u0141\3\2\2\2"+ - "\u0142\u0143\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0149\5\30\r\2\u0145\u0146"+ - "\7\5\2\2\u0146\u0148\5\30\r\2\u0147\u0145\3\2\2\2\u0148\u014b\3\2\2\2"+ - "\u0149\u0147\3\2\2\2\u0149\u014a\3\2\2\2\u014a\27\3\2\2\2\u014b\u0149"+ - "\3\2\2\2\u014c\u014d\5\32\16\2\u014d\31\3\2\2\2\u014e\u0157\7\3\2\2\u014f"+ - "\u0154\5,\27\2\u0150\u0151\7\5\2\2\u0151\u0153\5,\27\2\u0152\u0150\3\2"+ - "\2\2\u0153\u0156\3\2\2\2\u0154\u0152\3\2\2\2\u0154\u0155\3\2\2\2\u0155"+ - "\u0158\3\2\2\2\u0156\u0154\3\2\2\2\u0157\u014f\3\2\2\2\u0157\u0158\3\2"+ - "\2\2\u0158\u0159\3\2\2\2\u0159\u015c\7\4\2\2\u015a\u015c\5,\27\2\u015b"+ - "\u014e\3\2\2\2\u015b\u015a\3\2\2\2\u015c\33\3\2\2\2\u015d\u015e\5`\61"+ - "\2\u015e\u015f\7\f\2\2\u015f\u0160\7\3\2\2\u0160\u0161\5\n\6\2\u0161\u0162"+ - "\7\4\2\2\u0162\35\3\2\2\2\u0163\u0164\t\13\2\2\u0164\37\3\2\2\2\u0165"+ - "\u016a\5,\27\2\u0166\u0168\7\f\2\2\u0167\u0166\3\2\2\2\u0167\u0168\3\2"+ - "\2\2\u0168\u0169\3\2\2\2\u0169\u016b\5`\61\2\u016a\u0167\3\2\2\2\u016a"+ - "\u016b\3\2\2\2\u016b!\3\2\2\2\u016c\u0170\5*\26\2\u016d\u016f\5$\23\2"+ - "\u016e\u016d\3\2\2\2\u016f\u0172\3\2\2\2\u0170\u016e\3\2\2\2\u0170\u0171"+ - "\3\2\2\2\u0171#\3\2\2\2\u0172\u0170\3\2\2\2\u0173\u0174\5&\24\2\u0174"+ - "\u0175\7\67\2\2\u0175\u0177\5*\26\2\u0176\u0178\5(\25\2\u0177\u0176\3"+ - "\2\2\2\u0177\u0178\3\2\2\2\u0178\u017f\3\2\2\2\u0179\u017a\7B\2\2\u017a"+ - 
"\u017b\5&\24\2\u017b\u017c\7\67\2\2\u017c\u017d\5*\26\2\u017d\u017f\3"+ - "\2\2\2\u017e\u0173\3\2\2\2\u017e\u0179\3\2\2\2\u017f%\3\2\2\2\u0180\u0182"+ - "\7\64\2\2\u0181\u0180\3\2\2\2\u0181\u0182\3\2\2\2\u0182\u0190\3\2\2\2"+ - "\u0183\u0185\79\2\2\u0184\u0186\7J\2\2\u0185\u0184\3\2\2\2\u0185\u0186"+ - "\3\2\2\2\u0186\u0190\3\2\2\2\u0187\u0189\7N\2\2\u0188\u018a\7J\2\2\u0189"+ - "\u0188\3\2\2\2\u0189\u018a\3\2\2\2\u018a\u0190\3\2\2\2\u018b\u018d\7+"+ - "\2\2\u018c\u018e\7J\2\2\u018d\u018c\3\2\2\2\u018d\u018e\3\2\2\2\u018e"+ - "\u0190\3\2\2\2\u018f\u0181\3\2\2\2\u018f\u0183\3\2\2\2\u018f\u0187\3\2"+ - "\2\2\u018f\u018b\3\2\2\2\u0190\'\3\2\2\2\u0191\u0192\7F\2\2\u0192\u01a0"+ - "\5.\30\2\u0193\u0194\7_\2\2\u0194\u0195\7\3\2\2\u0195\u019a\5`\61\2\u0196"+ - "\u0197\7\5\2\2\u0197\u0199\5`\61\2\u0198\u0196\3\2\2\2\u0199\u019c\3\2"+ - "\2\2\u019a\u0198\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u019d\3\2\2\2\u019c"+ - "\u019a\3\2\2\2\u019d\u019e\7\4\2\2\u019e\u01a0\3\2\2\2\u019f\u0191\3\2"+ - "\2\2\u019f\u0193\3\2\2\2\u01a0)\3\2\2\2\u01a1\u01a3\7*\2\2\u01a2\u01a1"+ - "\3\2\2\2\u01a2\u01a3\3\2\2\2\u01a3\u01a4\3\2\2\2\u01a4\u01a9\5b\62\2\u01a5"+ - "\u01a7\7\f\2\2\u01a6\u01a5\3\2\2\2\u01a6\u01a7\3\2\2\2\u01a7\u01a8\3\2"+ - "\2\2\u01a8\u01aa\5^\60\2\u01a9\u01a6\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa"+ - "\u01be\3\2\2\2\u01ab\u01ac\7\3\2\2\u01ac\u01ad\5\n\6\2\u01ad\u01b2\7\4"+ - "\2\2\u01ae\u01b0\7\f\2\2\u01af\u01ae\3\2\2\2\u01af\u01b0\3\2\2\2\u01b0"+ - "\u01b1\3\2\2\2\u01b1\u01b3\5^\60\2\u01b2\u01af\3\2\2\2\u01b2\u01b3\3\2"+ - "\2\2\u01b3\u01be\3\2\2\2\u01b4\u01b5\7\3\2\2\u01b5\u01b6\5\"\22\2\u01b6"+ - "\u01bb\7\4\2\2\u01b7\u01b9\7\f\2\2\u01b8\u01b7\3\2\2\2\u01b8\u01b9\3\2"+ - "\2\2\u01b9\u01ba\3\2\2\2\u01ba\u01bc\5^\60\2\u01bb\u01b8\3\2\2\2\u01bb"+ - "\u01bc\3\2\2\2\u01bc\u01be\3\2\2\2\u01bd\u01a2\3\2\2\2\u01bd\u01ab\3\2"+ - "\2\2\u01bd\u01b4\3\2\2\2\u01be+\3\2\2\2\u01bf\u01c0\5.\30\2\u01c0-\3\2"+ - "\2\2\u01c1\u01c2\b\30\1\2\u01c2\u01c3\7C\2\2\u01c3\u01e1\5.\30\n\u01c4"+ - "\u01c5\7#\2\2\u01c5\u01c6\7\3\2\2\u01c6\u01c7\5\b\5\2\u01c7\u01c8\7\4"+ - "\2\2\u01c8\u01e1\3\2\2\2\u01c9\u01ca\7P\2\2\u01ca\u01cb\7\3\2\2\u01cb"+ - "\u01cc\5j\66\2\u01cc\u01cd\5\60\31\2\u01cd\u01ce\7\4\2\2\u01ce\u01e1\3"+ - "\2\2\2\u01cf\u01d0\7=\2\2\u01d0\u01d1\7\3\2\2\u01d1\u01d2\5^\60\2\u01d2"+ - "\u01d3\7\5\2\2\u01d3\u01d4\5j\66\2\u01d4\u01d5\5\60\31\2\u01d5\u01d6\7"+ - "\4\2\2\u01d6\u01e1\3\2\2\2\u01d7\u01d8\7=\2\2\u01d8\u01d9\7\3\2\2\u01d9"+ - "\u01da\5j\66\2\u01da\u01db\7\5\2\2\u01db\u01dc\5j\66\2\u01dc\u01dd\5\60"+ - "\31\2\u01dd\u01de\7\4\2\2\u01de\u01e1\3\2\2\2\u01df\u01e1\5\62\32\2\u01e0"+ - "\u01c1\3\2\2\2\u01e0\u01c4\3\2\2\2\u01e0\u01c9\3\2\2\2\u01e0\u01cf\3\2"+ - "\2\2\u01e0\u01d7\3\2\2\2\u01e0\u01df\3\2\2\2\u01e1\u01ea\3\2\2\2\u01e2"+ - "\u01e3\f\4\2\2\u01e3\u01e4\7\n\2\2\u01e4\u01e9\5.\30\5\u01e5\u01e6\f\3"+ - "\2\2\u01e6\u01e7\7H\2\2\u01e7\u01e9\5.\30\4\u01e8\u01e2\3\2\2\2\u01e8"+ - "\u01e5\3\2\2\2\u01e9\u01ec\3\2\2\2\u01ea\u01e8\3\2\2\2\u01ea\u01eb\3\2"+ - "\2\2\u01eb/\3\2\2\2\u01ec\u01ea\3\2\2\2\u01ed\u01ee\7\5\2\2\u01ee\u01f0"+ - "\5j\66\2\u01ef\u01ed\3\2\2\2\u01f0\u01f3\3\2\2\2\u01f1\u01ef\3\2\2\2\u01f1"+ - "\u01f2\3\2\2\2\u01f2\61\3\2\2\2\u01f3\u01f1\3\2\2\2\u01f4\u01f6\5<\37"+ - "\2\u01f5\u01f7\5\64\33\2\u01f6\u01f5\3\2\2\2\u01f6\u01f7\3\2\2\2\u01f7"+ - "\63\3\2\2\2\u01f8\u01fa\7C\2\2\u01f9\u01f8\3\2\2\2\u01f9\u01fa\3\2\2\2"+ - "\u01fa\u01fb\3\2\2\2\u01fb\u01fc\7\16\2\2\u01fc\u01fd\5<\37\2\u01fd\u01fe"+ - "\7\n\2\2\u01fe\u01ff\5<\37\2\u01ff\u0227\3\2\2\2\u0200\u0202\7C\2\2\u0201"+ - 
"\u0200\3\2\2\2\u0201\u0202\3\2\2\2\u0202\u0203\3\2\2\2\u0203\u0204\7\62"+ - "\2\2\u0204\u0205\7\3\2\2\u0205\u020a\5<\37\2\u0206\u0207\7\5\2\2\u0207"+ - "\u0209\5<\37\2\u0208\u0206\3\2\2\2\u0209\u020c\3\2\2\2\u020a\u0208\3\2"+ - "\2\2\u020a\u020b\3\2\2\2\u020b\u020d\3\2\2\2\u020c\u020a\3\2\2\2\u020d"+ - "\u020e\7\4\2\2\u020e\u0227\3\2\2\2\u020f\u0211\7C\2\2\u0210\u020f\3\2"+ - "\2\2\u0210\u0211\3\2\2\2\u0211\u0212\3\2\2\2\u0212\u0213\7\62\2\2\u0213"+ - "\u0214\7\3\2\2\u0214\u0215\5\b\5\2\u0215\u0216\7\4\2\2\u0216\u0227\3\2"+ - "\2\2\u0217\u0219\7C\2\2\u0218\u0217\3\2\2\2\u0218\u0219\3\2\2\2\u0219"+ - "\u021a\3\2\2\2\u021a\u021b\7:\2\2\u021b\u0227\58\35\2\u021c\u021e\7C\2"+ - "\2\u021d\u021c\3\2\2\2\u021d\u021e\3\2\2\2\u021e\u021f\3\2\2\2\u021f\u0220"+ - "\7O\2\2\u0220\u0227\5j\66\2\u0221\u0223\7\66\2\2\u0222\u0224\7C\2\2\u0223"+ - "\u0222\3\2\2\2\u0223\u0224\3\2\2\2\u0224\u0225\3\2\2\2\u0225\u0227\7D"+ - "\2\2\u0226\u01f9\3\2\2\2\u0226\u0201\3\2\2\2\u0226\u0210\3\2\2\2\u0226"+ - "\u0218\3\2\2\2\u0226\u021d\3\2\2\2\u0226\u0221\3\2\2\2\u0227\65\3\2\2"+ - "\2\u0228\u0229\7:\2\2\u0229\u022a\58\35\2\u022a\67\3\2\2\2\u022b\u022d"+ - "\5j\66\2\u022c\u022e\5:\36\2\u022d\u022c\3\2\2\2\u022d\u022e\3\2\2\2\u022e"+ - "9\3\2\2\2\u022f\u0230\7!\2\2\u0230\u0236\5j\66\2\u0231\u0232\7f\2\2\u0232"+ - "\u0233\5j\66\2\u0233\u0234\7m\2\2\u0234\u0236\3\2\2\2\u0235\u022f\3\2"+ - "\2\2\u0235\u0231\3\2\2\2\u0236;\3\2\2\2\u0237\u0238\b\37\1\2\u0238\u023c"+ - "\5> \2\u0239\u023a\t\7\2\2\u023a\u023c\5<\37\6\u023b\u0237\3\2\2\2\u023b"+ - "\u0239\3\2\2\2\u023c\u0249\3\2\2\2\u023d\u023e\f\5\2\2\u023e\u023f\t\f"+ - "\2\2\u023f\u0248\5<\37\6\u0240\u0241\f\4\2\2\u0241\u0242\t\7\2\2\u0242"+ - "\u0248\5<\37\5\u0243\u0244\f\3\2\2\u0244\u0245\5T+\2\u0245\u0246\5<\37"+ - "\4\u0246\u0248\3\2\2\2\u0247\u023d\3\2\2\2\u0247\u0240\3\2\2\2\u0247\u0243"+ - "\3\2\2\2\u0248\u024b\3\2\2\2\u0249\u0247\3\2\2\2\u0249\u024a\3\2\2\2\u024a"+ - "=\3\2\2\2\u024b\u0249\3\2\2\2\u024c\u024d\b \1\2\u024d\u0271\5B\"\2\u024e"+ - "\u0271\5H%\2\u024f\u0271\5@!\2\u0250\u0271\5R*\2\u0251\u0252\5^\60\2\u0252"+ - "\u0253\7|\2\2\u0253\u0255\3\2\2\2\u0254\u0251\3\2\2\2\u0254\u0255\3\2"+ - "\2\2\u0255\u0256\3\2\2\2\u0256\u0271\7w\2\2\u0257\u0271\5L\'\2\u0258\u0259"+ - "\7\3\2\2\u0259\u025a\5\b\5\2\u025a\u025b\7\4\2\2\u025b\u0271\3\2\2\2\u025c"+ - "\u0271\5^\60\2\u025d\u025e\7\3\2\2\u025e\u025f\5,\27\2\u025f\u0260\7\4"+ - "\2\2\u0260\u0271\3\2\2\2\u0261\u0263\7\20\2\2\u0262\u0264\5.\30\2\u0263"+ - "\u0262\3\2\2\2\u0263\u0264\3\2\2\2\u0264\u0266\3\2\2\2\u0265\u0267\5l"+ - "\67\2\u0266\u0265\3\2\2\2\u0267\u0268\3\2\2\2\u0268\u0266\3\2\2\2\u0268"+ - "\u0269\3\2\2\2\u0269\u026c\3\2\2\2\u026a\u026b\7\37\2\2\u026b\u026d\5"+ - ".\30\2\u026c\u026a\3\2\2\2\u026c\u026d\3\2\2\2\u026d\u026e\3\2\2\2\u026e"+ - "\u026f\7 \2\2\u026f\u0271\3\2\2\2\u0270\u024c\3\2\2\2\u0270\u024e\3\2"+ - "\2\2\u0270\u024f\3\2\2\2\u0270\u0250\3\2\2\2\u0270\u0254\3\2\2\2\u0270"+ - "\u0257\3\2\2\2\u0270\u0258\3\2\2\2\u0270\u025c\3\2\2\2\u0270\u025d\3\2"+ - "\2\2\u0270\u0261\3\2\2\2\u0271\u0277\3\2\2\2\u0272\u0273\f\f\2\2\u0273"+ - "\u0274\7z\2\2\u0274\u0276\5\\/\2\u0275\u0272\3\2\2\2\u0276\u0279\3\2\2"+ - "\2\u0277\u0275\3\2\2\2\u0277\u0278\3\2\2\2\u0278?\3\2\2\2\u0279\u0277"+ - "\3\2\2\2\u027a\u027e\7\30\2\2\u027b\u027e\7\26\2\2\u027c\u027e\7\27\2"+ - "\2\u027d\u027a\3\2\2\2\u027d\u027b\3\2\2\2\u027d\u027c\3\2\2\2\u027eA"+ - "\3\2\2\2\u027f\u028a\5D#\2\u0280\u0281\7g\2\2\u0281\u0282\5D#\2\u0282"+ - "\u0283\7m\2\2\u0283\u028a\3\2\2\2\u0284\u028a\5F$\2\u0285\u0286\7g\2\2"+ - 
"\u0286\u0287\5F$\2\u0287\u0288\7m\2\2\u0288\u028a\3\2\2\2\u0289\u027f"+ - "\3\2\2\2\u0289\u0280\3\2\2\2\u0289\u0284\3\2\2\2\u0289\u0285\3\2\2\2\u028a"+ - "C\3\2\2\2\u028b\u028c\7\21\2\2\u028c\u028d\7\3\2\2\u028d\u028e\5,\27\2"+ - "\u028e\u028f\7\f\2\2\u028f\u0290\5\\/\2\u0290\u0291\7\4\2\2\u0291E\3\2"+ - "\2\2\u0292\u0293\7\25\2\2\u0293\u0294\7\3\2\2\u0294\u0295\5,\27\2\u0295"+ - "\u0296\7\5\2\2\u0296\u0297\5\\/\2\u0297\u0298\7\4\2\2\u0298G\3\2\2\2\u0299"+ - "\u029f\5J&\2\u029a\u029b\7g\2\2\u029b\u029c\5J&\2\u029c\u029d\7m\2\2\u029d"+ - "\u029f\3\2\2\2\u029e\u0299\3\2\2\2\u029e\u029a\3\2\2\2\u029fI\3\2\2\2"+ - "\u02a0\u02a1\7%\2\2\u02a1\u02a2\7\3\2\2\u02a2\u02a3\5`\61\2\u02a3\u02a4"+ - "\7)\2\2\u02a4\u02a5\5<\37\2\u02a5\u02a6\7\4\2\2\u02a6K\3\2\2\2\u02a7\u02ad"+ - "\5N(\2\u02a8\u02a9\7g\2\2\u02a9\u02aa\5N(\2\u02aa\u02ab\7m\2\2\u02ab\u02ad"+ - "\3\2\2\2\u02ac\u02a7\3\2\2\2\u02ac\u02a8\3\2\2\2\u02adM\3\2\2\2\u02ae"+ - "\u02af\5P)\2\u02af\u02bb\7\3\2\2\u02b0\u02b2\5\36\20\2\u02b1\u02b0\3\2"+ - "\2\2\u02b1\u02b2\3\2\2\2\u02b2\u02b3\3\2\2\2\u02b3\u02b8\5,\27\2\u02b4"+ - "\u02b5\7\5\2\2\u02b5\u02b7\5,\27\2\u02b6\u02b4\3\2\2\2\u02b7\u02ba\3\2"+ - "\2\2\u02b8\u02b6\3\2\2\2\u02b8\u02b9\3\2\2\2\u02b9\u02bc\3\2\2\2\u02ba"+ - "\u02b8\3\2\2\2\u02bb\u02b1\3\2\2\2\u02bb\u02bc\3\2\2\2\u02bc\u02bd\3\2"+ - "\2\2\u02bd\u02be\7\4\2\2\u02beO\3\2\2\2\u02bf\u02c3\79\2\2\u02c0\u02c3"+ - "\7N\2\2\u02c1\u02c3\5`\61\2\u02c2\u02bf\3\2\2\2\u02c2\u02c0\3\2\2\2\u02c2"+ - "\u02c1\3\2\2\2\u02c3Q\3\2\2\2\u02c4\u02df\7D\2\2\u02c5\u02df\5X-\2\u02c6"+ - "\u02df\5h\65\2\u02c7\u02df\5V,\2\u02c8\u02ca\7~\2\2\u02c9\u02c8\3\2\2"+ - "\2\u02ca\u02cb\3\2\2\2\u02cb\u02c9\3\2\2\2\u02cb\u02cc\3\2\2\2\u02cc\u02df"+ - "\3\2\2\2\u02cd\u02df\7}\2\2\u02ce\u02cf\7i\2\2\u02cf\u02d0\5j\66\2\u02d0"+ - "\u02d1\7m\2\2\u02d1\u02df\3\2\2\2\u02d2\u02d3\7j\2\2\u02d3\u02d4\5j\66"+ - "\2\u02d4\u02d5\7m\2\2\u02d5\u02df\3\2\2\2\u02d6\u02d7\7k\2\2\u02d7\u02d8"+ - "\5j\66\2\u02d8\u02d9\7m\2\2\u02d9\u02df\3\2\2\2\u02da\u02db\7l\2\2\u02db"+ - "\u02dc\5j\66\2\u02dc\u02dd\7m\2\2\u02dd\u02df\3\2\2\2\u02de\u02c4\3\2"+ - "\2\2\u02de\u02c5\3\2\2\2\u02de\u02c6\3\2\2\2\u02de\u02c7\3\2\2\2\u02de"+ - "\u02c9\3\2\2\2\u02de\u02cd\3\2\2\2\u02de\u02ce\3\2\2\2\u02de\u02d2\3\2"+ - "\2\2\u02de\u02d6\3\2\2\2\u02de\u02da\3\2\2\2\u02dfS\3\2\2\2\u02e0\u02e1"+ - "\t\r\2\2\u02e1U\3\2\2\2\u02e2\u02e3\t\16\2\2\u02e3W\3\2\2\2\u02e4\u02e6"+ - "\7\65\2\2\u02e5\u02e7\t\7\2\2\u02e6\u02e5\3\2\2\2\u02e6\u02e7\3\2\2\2"+ - "\u02e7\u02ea\3\2\2\2\u02e8\u02eb\5h\65\2\u02e9\u02eb\5j\66\2\u02ea\u02e8"+ - "\3\2\2\2\u02ea\u02e9\3\2\2\2\u02eb\u02ec\3\2\2\2\u02ec\u02ef\5Z.\2\u02ed"+ - "\u02ee\7\\\2\2\u02ee\u02f0\5Z.\2\u02ef\u02ed\3\2\2\2\u02ef\u02f0\3\2\2"+ - "\2\u02f0Y\3\2\2\2\u02f1\u02f2\t\17\2\2\u02f2[\3\2\2\2\u02f3\u02f4\5`\61"+ - "\2\u02f4]\3\2\2\2\u02f5\u02f6\5`\61\2\u02f6\u02f7\7|\2\2\u02f7\u02f9\3"+ - "\2\2\2\u02f8\u02f5\3\2\2\2\u02f9\u02fc\3\2\2\2\u02fa\u02f8\3\2\2\2\u02fa"+ - "\u02fb\3\2\2\2\u02fb\u02fd\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fd\u02fe\5`"+ - "\61\2\u02fe_\3\2\2\2\u02ff\u0302\5d\63\2\u0300\u0302\5f\64\2\u0301\u02ff"+ - "\3\2\2\2\u0301\u0300\3\2\2\2\u0302a\3\2\2\2\u0303\u0304\5`\61\2\u0304"+ - "\u0305\7\6\2\2\u0305\u0307\3\2\2\2\u0306\u0303\3\2\2\2\u0306\u0307\3\2"+ - "\2\2\u0307\u0308\3\2\2\2\u0308\u0310\7\u0083\2\2\u0309\u030a\5`\61\2\u030a"+ - "\u030b\7\6\2\2\u030b\u030d\3\2\2\2\u030c\u0309\3\2\2\2\u030c\u030d\3\2"+ - "\2\2\u030d\u030e\3\2\2\2\u030e\u0310\5`\61\2\u030f\u0306\3\2\2\2\u030f"+ - "\u030c\3\2\2\2\u0310c\3\2\2\2\u0311\u0314\7\u0084\2\2\u0312\u0314\7\u0085"+ - 
"\2\2\u0313\u0311\3\2\2\2\u0313\u0312\3\2\2\2\u0314e\3\2\2\2\u0315\u0319"+ - "\7\u0081\2\2\u0316\u0319\5n8\2\u0317\u0319\7\u0082\2\2\u0318\u0315\3\2"+ - "\2\2\u0318\u0316\3\2\2\2\u0318\u0317\3\2\2\2\u0319g\3\2\2\2\u031a\u031d"+ - "\7\u0080\2\2\u031b\u031d\7\177\2\2\u031c\u031a\3\2\2\2\u031c\u031b\3\2"+ - "\2\2\u031di\3\2\2\2\u031e\u031f\t\20\2\2\u031fk\3\2\2\2\u0320\u0321\7"+ - "a\2\2\u0321\u0322\5,\27\2\u0322\u0323\7Z\2\2\u0323\u0324\5,\27\2\u0324"+ - "m\3\2\2\2\u0325\u0326\t\21\2\2\u0326o\3\2\2\2o\177\u0081\u0085\u008e\u0090"+ - "\u0094\u009b\u009f\u00a5\u00aa\u00af\u00b3\u00b8\u00c0\u00c4\u00cc\u00cf"+ - "\u00d5\u00da\u00dd\u00e2\u00e5\u00e7\u00ef\u00f2\u00fe\u0101\u0104\u010b"+ - "\u0112\u0116\u011a\u011e\u0125\u0129\u012d\u0132\u0136\u013e\u0142\u0149"+ - "\u0154\u0157\u015b\u0167\u016a\u0170\u0177\u017e\u0181\u0185\u0189\u018d"+ - "\u018f\u019a\u019f\u01a2\u01a6\u01a9\u01af\u01b2\u01b8\u01bb\u01bd\u01e0"+ - "\u01e8\u01ea\u01f1\u01f6\u01f9\u0201\u020a\u0210\u0218\u021d\u0223\u0226"+ - "\u022d\u0235\u023b\u0247\u0249\u0254\u0263\u0268\u026c\u0270\u0277\u027d"+ - "\u0289\u029e\u02ac\u02b1\u02b8\u02bb\u02c2\u02cb\u02de\u02e6\u02ea\u02ef"+ - "\u02fa\u0301\u0306\u030c\u030f\u0313\u0318\u031c"; + "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\3\2\3"+ + "\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0088\n\4\f"+ + "\4\16\4\u008b\13\4\3\4\5\4\u008e\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0097"+ + "\n\4\f\4\16\4\u009a\13\4\3\4\5\4\u009d\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00a4"+ + "\n\4\3\4\3\4\5\4\u00a8\n\4\3\4\3\4\3\4\3\4\5\4\u00ae\n\4\3\4\3\4\3\4\5"+ + "\4\u00b3\n\4\3\4\3\4\3\4\5\4\u00b8\n\4\3\4\3\4\5\4\u00bc\n\4\3\4\3\4\3"+ + "\4\5\4\u00c1\n\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u00c9\n\4\3\4\3\4\5\4\u00cd"+ + "\n\4\3\4\3\4\3\4\3\4\7\4\u00d3\n\4\f\4\16\4\u00d6\13\4\5\4\u00d8\n\4\3"+ + "\4\3\4\3\4\3\4\5\4\u00de\n\4\3\4\3\4\3\4\5\4\u00e3\n\4\3\4\5\4\u00e6\n"+ + "\4\3\4\3\4\3\4\5\4\u00eb\n\4\3\4\5\4\u00ee\n\4\5\4\u00f0\n\4\3\5\3\5\3"+ + "\5\3\5\7\5\u00f6\n\5\f\5\16\5\u00f9\13\5\5\5\u00fb\n\5\3\5\3\5\3\6\3\6"+ + "\3\6\3\6\3\6\3\6\7\6\u0105\n\6\f\6\16\6\u0108\13\6\5\6\u010a\n\6\3\6\5"+ + "\6\u010d\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u0114\n\7\3\b\3\b\3\b\3\b\3\b\5\b"+ + "\u011b\n\b\3\t\3\t\5\t\u011f\n\t\3\t\3\t\5\t\u0123\n\t\3\n\3\n\5\n\u0127"+ + "\n\n\3\n\3\n\5\n\u012b\n\n\3\n\3\n\5\n\u012f\n\n\3\n\3\n\3\n\5\n\u0134"+ + "\n\n\3\n\3\n\5\n\u0138\n\n\3\13\3\13\3\13\3\13\7\13\u013e\n\13\f\13\16"+ + "\13\u0141\13\13\3\13\5\13\u0144\n\13\3\f\5\f\u0147\n\f\3\f\3\f\3\f\7\f"+ + "\u014c\n\f\f\f\16\f\u014f\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16\u0157"+ + "\n\16\f\16\16\16\u015a\13\16\5\16\u015c\n\16\3\16\3\16\5\16\u0160\n\16"+ + "\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\3\21\7\21\u016d\n\21"+ + "\f\21\16\21\u0170\13\21\3\22\3\22\5\22\u0174\n\22\3\22\5\22\u0177\n\22"+ + "\3\23\3\23\7\23\u017b\n\23\f\23\16\23\u017e\13\23\3\24\3\24\3\24\3\24"+ + "\5\24\u0184\n\24\3\24\3\24\3\24\3\24\3\24\5\24\u018b\n\24\3\25\5\25\u018e"+ + "\n\25\3\25\3\25\5\25\u0192\n\25\3\25\3\25\5\25\u0196\n\25\3\25\3\25\5"+ + "\25\u019a\n\25\5\25\u019c\n\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\7\26"+ + "\u01a5\n\26\f\26\16\26\u01a8\13\26\3\26\3\26\5\26\u01ac\n\26\3\27\5\27"+ + "\u01af\n\27\3\27\3\27\5\27\u01b3\n\27\3\27\5\27\u01b6\n\27\3\27\3\27\3"+ + "\27\3\27\5\27\u01bc\n\27\3\27\5\27\u01bf\n\27\3\27\3\27\3\27\3\27\5\27"+ + "\u01c5\n\27\3\27\5\27\u01c8\n\27\5\27\u01ca\n\27\3\30\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\7\31\u01da\n\31\f\31"+ + 
"\16\31\u01dd\13\31\3\32\3\32\5\32\u01e1\n\32\3\32\5\32\u01e4\n\32\3\33"+ + "\3\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+ + "\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+ + "\3\34\3\34\3\34\3\34\5\34\u0207\n\34\3\34\3\34\3\34\3\34\3\34\3\34\7\34"+ + "\u020f\n\34\f\34\16\34\u0212\13\34\3\35\3\35\7\35\u0216\n\35\f\35\16\35"+ + "\u0219\13\35\3\36\3\36\5\36\u021d\n\36\3\37\5\37\u0220\n\37\3\37\3\37"+ + "\3\37\3\37\3\37\3\37\5\37\u0228\n\37\3\37\3\37\3\37\3\37\3\37\7\37\u022f"+ + "\n\37\f\37\16\37\u0232\13\37\3\37\3\37\3\37\5\37\u0237\n\37\3\37\3\37"+ + "\3\37\3\37\3\37\3\37\5\37\u023f\n\37\3\37\3\37\3\37\5\37\u0244\n\37\3"+ + "\37\3\37\3\37\3\37\5\37\u024a\n\37\3\37\5\37\u024d\n\37\3 \3 \3 \3!\3"+ + "!\5!\u0254\n!\3\"\3\"\3\"\3\"\3\"\3\"\5\"\u025c\n\"\3#\3#\3#\3#\5#\u0262"+ + "\n#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\7#\u026e\n#\f#\16#\u0271\13#\3$\3$\3"+ + "$\3$\3$\3$\3$\3$\5$\u027b\n$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\5"+ + "$\u028a\n$\3$\6$\u028d\n$\r$\16$\u028e\3$\3$\5$\u0293\n$\3$\3$\5$\u0297"+ + "\n$\3$\3$\3$\7$\u029c\n$\f$\16$\u029f\13$\3%\3%\3%\5%\u02a4\n%\3&\3&\3"+ + "&\3&\3&\3&\3&\3&\3&\3&\5&\u02b0\n&\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3"+ + "(\3(\3(\3(\3(\3)\3)\3)\3)\3)\5)\u02c5\n)\3*\3*\3*\3*\3*\3*\3*\3+\3+\3"+ + "+\3+\3+\5+\u02d3\n+\3,\3,\3,\5,\u02d8\n,\3,\3,\3,\7,\u02dd\n,\f,\16,\u02e0"+ + "\13,\5,\u02e2\n,\3,\3,\3-\3-\3-\5-\u02e9\n-\3.\3.\3.\3.\3.\6.\u02f0\n"+ + ".\r.\16.\u02f1\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\5.\u0305"+ + "\n.\3/\3/\3\60\3\60\3\61\3\61\5\61\u030d\n\61\3\61\3\61\5\61\u0311\n\61"+ + "\3\61\3\61\3\61\5\61\u0316\n\61\3\62\3\62\3\63\3\63\3\64\3\64\3\64\7\64"+ + "\u031f\n\64\f\64\16\64\u0322\13\64\3\64\3\64\3\65\3\65\5\65\u0328\n\65"+ + "\3\66\3\66\3\66\5\66\u032d\n\66\3\66\3\66\3\66\3\66\5\66\u0333\n\66\3"+ + "\66\5\66\u0336\n\66\3\67\3\67\5\67\u033a\n\67\38\38\38\58\u033f\n8\39"+ + "\39\59\u0343\n9\3:\3:\3;\3;\3;\3;\3;\3<\3<\3<\2\5\66DF=\2\4\6\b\n\f\16"+ + "\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bd"+ + "fhjlnprtv\2\22\b\2\7\7\t\t\"\"==HHLL\4\2..[[\4\2\t\tHH\4\2**\63\63\3\2"+ + "\34\35\3\2wx\4\2\7\7\u0081\u0081\4\2\r\r\34\34\4\2\'\'99\4\2\7\7\36\36"+ + "\3\2y{\3\2pv\4\2&&]]\7\2\31\32\61\62?BTUfg\3\2\177\u0080\31\2\b\t\23\24"+ + "\26\31\33\33\"\"$$\'\')),.\61\61\66\6699<=??AAHHLOQTWXZ[_`bbff\u03b1\2"+ + "x\3\2\2\2\4{\3\2\2\2\6\u00ef\3\2\2\2\b\u00fa\3\2\2\2\n\u00fe\3\2\2\2\f"+ + "\u0113\3\2\2\2\16\u011a\3\2\2\2\20\u011c\3\2\2\2\22\u0124\3\2\2\2\24\u0139"+ + "\3\2\2\2\26\u0146\3\2\2\2\30\u0150\3\2\2\2\32\u015f\3\2\2\2\34\u0161\3"+ + "\2\2\2\36\u0167\3\2\2\2 \u0169\3\2\2\2\"\u0171\3\2\2\2$\u0178\3\2\2\2"+ + "&\u018a\3\2\2\2(\u019b\3\2\2\2*\u01ab\3\2\2\2,\u01c9\3\2\2\2.\u01cb\3"+ + "\2\2\2\60\u01d6\3\2\2\2\62\u01de\3\2\2\2\64\u01e5\3\2\2\2\66\u0206\3\2"+ + "\2\28\u0217\3\2\2\2:\u021a\3\2\2\2<\u024c\3\2\2\2>\u024e\3\2\2\2@\u0251"+ + "\3\2\2\2B\u025b\3\2\2\2D\u0261\3\2\2\2F\u0296\3\2\2\2H\u02a3\3\2\2\2J"+ + "\u02af\3\2\2\2L\u02b1\3\2\2\2N\u02b8\3\2\2\2P\u02c4\3\2\2\2R\u02c6\3\2"+ + "\2\2T\u02d2\3\2\2\2V\u02d4\3\2\2\2X\u02e8\3\2\2\2Z\u0304\3\2\2\2\\\u0306"+ + "\3\2\2\2^\u0308\3\2\2\2`\u030a\3\2\2\2b\u0317\3\2\2\2d\u0319\3\2\2\2f"+ + "\u0320\3\2\2\2h\u0327\3\2\2\2j\u0335\3\2\2\2l\u0339\3\2\2\2n\u033e\3\2"+ + "\2\2p\u0342\3\2\2\2r\u0344\3\2\2\2t\u0346\3\2\2\2v\u034b\3\2\2\2xy\5\6"+ + "\4\2yz\7\2\2\3z\3\3\2\2\2{|\5\64\33\2|}\7\2\2\3}\5\3\2\2\2~\u00f0\5\b"+ + "\5\2\177\u008d\7$\2\2\u0080\u0089\7\3\2\2\u0081\u0082\7O\2\2\u0082\u0088"+ + 
"\t\2\2\2\u0083\u0084\7)\2\2\u0084\u0088\t\3\2\2\u0085\u0086\7b\2\2\u0086"+ + "\u0088\5^\60\2\u0087\u0081\3\2\2\2\u0087\u0083\3\2\2\2\u0087\u0085\3\2"+ + "\2\2\u0088\u008b\3\2\2\2\u0089\u0087\3\2\2\2\u0089\u008a\3\2\2\2\u008a"+ + "\u008c\3\2\2\2\u008b\u0089\3\2\2\2\u008c\u008e\7\4\2\2\u008d\u0080\3\2"+ + "\2\2\u008d\u008e\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u00f0\5\6\4\2\u0090"+ + "\u009c\7\33\2\2\u0091\u0098\7\3\2\2\u0092\u0093\7O\2\2\u0093\u0097\t\4"+ + "\2\2\u0094\u0095\7)\2\2\u0095\u0097\t\3\2\2\u0096\u0092\3\2\2\2\u0096"+ + "\u0094\3\2\2\2\u0097\u009a\3\2\2\2\u0098\u0096\3\2\2\2\u0098\u0099\3\2"+ + "\2\2\u0099\u009b\3\2\2\2\u009a\u0098\3\2\2\2\u009b\u009d\7\4\2\2\u009c"+ + "\u0091\3\2\2\2\u009c\u009d\3\2\2\2\u009d\u009e\3\2\2\2\u009e\u00f0\5\6"+ + "\4\2\u009f\u00a0\7W\2\2\u00a0\u00a3\7Z\2\2\u00a1\u00a2\7\64\2\2\u00a2"+ + "\u00a4\7+\2\2\u00a3\u00a1\3\2\2\2\u00a3\u00a4\3\2\2\2\u00a4\u00a7\3\2"+ + "\2\2\u00a5\u00a8\5> \2\u00a6\u00a8\5j\66\2\u00a7\u00a5\3\2\2\2\u00a7\u00a6"+ + "\3\2\2\2\u00a7\u00a8\3\2\2\2\u00a8\u00f0\3\2\2\2\u00a9\u00aa\7W\2\2\u00aa"+ + "\u00ad\7\24\2\2\u00ab\u00ac\7\64\2\2\u00ac\u00ae\7+\2\2\u00ad\u00ab\3"+ + "\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00b2\t\5\2\2\u00b0"+ + "\u00b3\5> \2\u00b1\u00b3\5j\66\2\u00b2\u00b0\3\2\2\2\u00b2\u00b1\3\2\2"+ + "\2\u00b3\u00f0\3\2\2\2\u00b4\u00b7\t\6\2\2\u00b5\u00b6\7\64\2\2\u00b6"+ + "\u00b8\7+\2\2\u00b7\u00b5\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00bb\3\2"+ + "\2\2\u00b9\u00bc\5> \2\u00ba\u00bc\5j\66\2\u00bb\u00b9\3\2\2\2\u00bb\u00ba"+ + "\3\2\2\2\u00bc\u00f0\3\2\2\2\u00bd\u00be\7W\2\2\u00be\u00c0\7-\2\2\u00bf"+ + "\u00c1\5> \2\u00c0\u00bf\3\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\u00f0\3\2\2"+ + "\2\u00c2\u00c3\7W\2\2\u00c3\u00f0\7S\2\2\u00c4\u00c5\7X\2\2\u00c5\u00c8"+ + "\7Z\2\2\u00c6\u00c7\7\22\2\2\u00c7\u00c9\5> \2\u00c8\u00c6\3\2\2\2\u00c8"+ + "\u00c9\3\2\2\2\u00c9\u00cc\3\2\2\2\u00ca\u00cd\5> \2\u00cb\u00cd\5j\66"+ + "\2\u00cc\u00ca\3\2\2\2\u00cc\u00cb\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d7"+ + "\3\2\2\2\u00ce\u00cf\7_\2\2\u00cf\u00d4\5r:\2\u00d0\u00d1\7\5\2\2\u00d1"+ + "\u00d3\5r:\2\u00d2\u00d0\3\2\2\2\u00d3\u00d6\3\2\2\2\u00d4\u00d2\3\2\2"+ + "\2\u00d4\u00d5\3\2\2\2\u00d5\u00d8\3\2\2\2\u00d6\u00d4\3\2\2\2\u00d7\u00ce"+ + "\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\u00f0\3\2\2\2\u00d9\u00da\7X\2\2\u00da"+ + "\u00dd\7\24\2\2\u00db\u00dc\7\22\2\2\u00dc\u00de\5r:\2\u00dd\u00db\3\2"+ + "\2\2\u00dd\u00de\3\2\2\2\u00de\u00e2\3\2\2\2\u00df\u00e0\7Y\2\2\u00e0"+ + "\u00e3\5> \2\u00e1\u00e3\5j\66\2\u00e2\u00df\3\2\2\2\u00e2\u00e1\3\2\2"+ + "\2\u00e2\u00e3\3\2\2\2\u00e3\u00e5\3\2\2\2\u00e4\u00e6\5> \2\u00e5\u00e4"+ + "\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00f0\3\2\2\2\u00e7\u00e8\7X\2\2\u00e8"+ + "\u00ed\7`\2\2\u00e9\u00eb\t\7\2\2\u00ea\u00e9\3\2\2\2\u00ea\u00eb\3\2"+ + "\2\2\u00eb\u00ec\3\2\2\2\u00ec\u00ee\5p9\2\u00ed\u00ea\3\2\2\2\u00ed\u00ee"+ + "\3\2\2\2\u00ee\u00f0\3\2\2\2\u00ef~\3\2\2\2\u00ef\177\3\2\2\2\u00ef\u0090"+ + "\3\2\2\2\u00ef\u009f\3\2\2\2\u00ef\u00a9\3\2\2\2\u00ef\u00b4\3\2\2\2\u00ef"+ + "\u00bd\3\2\2\2\u00ef\u00c2\3\2\2\2\u00ef\u00c4\3\2\2\2\u00ef\u00d9\3\2"+ + "\2\2\u00ef\u00e7\3\2\2\2\u00f0\7\3\2\2\2\u00f1\u00f2\7e\2\2\u00f2\u00f7"+ + "\5\34\17\2\u00f3\u00f4\7\5\2\2\u00f4\u00f6\5\34\17\2\u00f5\u00f3\3\2\2"+ + "\2\u00f6\u00f9\3\2\2\2\u00f7\u00f5\3\2\2\2\u00f7\u00f8\3\2\2\2\u00f8\u00fb"+ + "\3\2\2\2\u00f9\u00f7\3\2\2\2\u00fa\u00f1\3\2\2\2\u00fa\u00fb\3\2\2\2\u00fb"+ + "\u00fc\3\2\2\2\u00fc\u00fd\5\n\6\2\u00fd\t\3\2\2\2\u00fe\u0109\5\16\b"+ + "\2\u00ff\u0100\7J\2\2\u0100\u0101\7\17\2\2\u0101\u0106\5\20\t\2\u0102"+ + 
"\u0103\7\5\2\2\u0103\u0105\5\20\t\2\u0104\u0102\3\2\2\2\u0105\u0108\3"+ + "\2\2\2\u0106\u0104\3\2\2\2\u0106\u0107\3\2\2\2\u0107\u010a\3\2\2\2\u0108"+ + "\u0106\3\2\2\2\u0109\u00ff\3\2\2\2\u0109\u010a\3\2\2\2\u010a\u010c\3\2"+ + "\2\2\u010b\u010d\5\f\7\2\u010c\u010b\3\2\2\2\u010c\u010d\3\2\2\2\u010d"+ + "\13\3\2\2\2\u010e\u010f\7<\2\2\u010f\u0114\t\b\2\2\u0110\u0111\7j\2\2"+ + "\u0111\u0112\t\b\2\2\u0112\u0114\7o\2\2\u0113\u010e\3\2\2\2\u0113\u0110"+ + "\3\2\2\2\u0114\r\3\2\2\2\u0115\u011b\5\22\n\2\u0116\u0117\7\3\2\2\u0117"+ + "\u0118\5\n\6\2\u0118\u0119\7\4\2\2\u0119\u011b\3\2\2\2\u011a\u0115\3\2"+ + "\2\2\u011a\u0116\3\2\2\2\u011b\17\3\2\2\2\u011c\u011e\5\64\33\2\u011d"+ + "\u011f\t\t\2\2\u011e\u011d\3\2\2\2\u011e\u011f\3\2\2\2\u011f\u0122\3\2"+ + "\2\2\u0120\u0121\7F\2\2\u0121\u0123\t\n\2\2\u0122\u0120\3\2\2\2\u0122"+ + "\u0123\3\2\2\2\u0123\21\3\2\2\2\u0124\u0126\7V\2\2\u0125\u0127\5\36\20"+ + "\2\u0126\u0125\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u0128\3\2\2\2\u0128\u012a"+ + "\5 \21\2\u0129\u012b\5\24\13\2\u012a\u0129\3\2\2\2\u012a\u012b\3\2\2\2"+ + "\u012b\u012e\3\2\2\2\u012c\u012d\7d\2\2\u012d\u012f\5\66\34\2\u012e\u012c"+ + "\3\2\2\2\u012e\u012f\3\2\2\2\u012f\u0133\3\2\2\2\u0130\u0131\7/\2\2\u0131"+ + "\u0132\7\17\2\2\u0132\u0134\5\26\f\2\u0133\u0130\3\2\2\2\u0133\u0134\3"+ + "\2\2\2\u0134\u0137\3\2\2\2\u0135\u0136\7\60\2\2\u0136\u0138\5\66\34\2"+ + "\u0137\u0135\3\2\2\2\u0137\u0138\3\2\2\2\u0138\23\3\2\2\2\u0139\u013a"+ + "\7*\2\2\u013a\u013f\5$\23\2\u013b\u013c\7\5\2\2\u013c\u013e\5$\23\2\u013d"+ + "\u013b\3\2\2\2\u013e\u0141\3\2\2\2\u013f\u013d\3\2\2\2\u013f\u0140\3\2"+ + "\2\2\u0140\u0143\3\2\2\2\u0141\u013f\3\2\2\2\u0142\u0144\5.\30\2\u0143"+ + "\u0142\3\2\2\2\u0143\u0144\3\2\2\2\u0144\25\3\2\2\2\u0145\u0147\5\36\20"+ + "\2\u0146\u0145\3\2\2\2\u0146\u0147\3\2\2\2\u0147\u0148\3\2\2\2\u0148\u014d"+ + "\5\30\r\2\u0149\u014a\7\5\2\2\u014a\u014c\5\30\r\2\u014b\u0149\3\2\2\2"+ + "\u014c\u014f\3\2\2\2\u014d\u014b\3\2\2\2\u014d\u014e\3\2\2\2\u014e\27"+ + "\3\2\2\2\u014f\u014d\3\2\2\2\u0150\u0151\5\32\16\2\u0151\31\3\2\2\2\u0152"+ + "\u015b\7\3\2\2\u0153\u0158\5\64\33\2\u0154\u0155\7\5\2\2\u0155\u0157\5"+ + "\64\33\2\u0156\u0154\3\2\2\2\u0157\u015a\3\2\2\2\u0158\u0156\3\2\2\2\u0158"+ + "\u0159\3\2\2\2\u0159\u015c\3\2\2\2\u015a\u0158\3\2\2\2\u015b\u0153\3\2"+ + "\2\2\u015b\u015c\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u0160\7\4\2\2\u015e"+ + "\u0160\5\64\33\2\u015f\u0152\3\2\2\2\u015f\u015e\3\2\2\2\u0160\33\3\2"+ + "\2\2\u0161\u0162\5h\65\2\u0162\u0163\7\f\2\2\u0163\u0164\7\3\2\2\u0164"+ + "\u0165\5\n\6\2\u0165\u0166\7\4\2\2\u0166\35\3\2\2\2\u0167\u0168\t\13\2"+ + "\2\u0168\37\3\2\2\2\u0169\u016e\5\"\22\2\u016a\u016b\7\5\2\2\u016b\u016d"+ + "\5\"\22\2\u016c\u016a\3\2\2\2\u016d\u0170\3\2\2\2\u016e\u016c\3\2\2\2"+ + "\u016e\u016f\3\2\2\2\u016f!\3\2\2\2\u0170\u016e\3\2\2\2\u0171\u0176\5"+ + "\64\33\2\u0172\u0174\7\f\2\2\u0173\u0172\3\2\2\2\u0173\u0174\3\2\2\2\u0174"+ + "\u0175\3\2\2\2\u0175\u0177\5h\65\2\u0176\u0173\3\2\2\2\u0176\u0177\3\2"+ + "\2\2\u0177#\3\2\2\2\u0178\u017c\5,\27\2\u0179\u017b\5&\24\2\u017a\u0179"+ + "\3\2\2\2\u017b\u017e\3\2\2\2\u017c\u017a\3\2\2\2\u017c\u017d\3\2\2\2\u017d"+ + "%\3\2\2\2\u017e\u017c\3\2\2\2\u017f\u0180\5(\25\2\u0180\u0181\78\2\2\u0181"+ + "\u0183\5,\27\2\u0182\u0184\5*\26\2\u0183\u0182\3\2\2\2\u0183\u0184\3\2"+ + "\2\2\u0184\u018b\3\2\2\2\u0185\u0186\7C\2\2\u0186\u0187\5(\25\2\u0187"+ + "\u0188\78\2\2\u0188\u0189\5,\27\2\u0189\u018b\3\2\2\2\u018a\u017f\3\2"+ + "\2\2\u018a\u0185\3\2\2\2\u018b\'\3\2\2\2\u018c\u018e\7\65\2\2\u018d\u018c"+ + 
"\3\2\2\2\u018d\u018e\3\2\2\2\u018e\u019c\3\2\2\2\u018f\u0191\7:\2\2\u0190"+ + "\u0192\7K\2\2\u0191\u0190\3\2\2\2\u0191\u0192\3\2\2\2\u0192\u019c\3\2"+ + "\2\2\u0193\u0195\7P\2\2\u0194\u0196\7K\2\2\u0195\u0194\3\2\2\2\u0195\u0196"+ + "\3\2\2\2\u0196\u019c\3\2\2\2\u0197\u0199\7,\2\2\u0198\u019a\7K\2\2\u0199"+ + "\u0198\3\2\2\2\u0199\u019a\3\2\2\2\u019a\u019c\3\2\2\2\u019b\u018d\3\2"+ + "\2\2\u019b\u018f\3\2\2\2\u019b\u0193\3\2\2\2\u019b\u0197\3\2\2\2\u019c"+ + ")\3\2\2\2\u019d\u019e\7G\2\2\u019e\u01ac\5\66\34\2\u019f\u01a0\7a\2\2"+ + "\u01a0\u01a1\7\3\2\2\u01a1\u01a6\5h\65\2\u01a2\u01a3\7\5\2\2\u01a3\u01a5"+ + "\5h\65\2\u01a4\u01a2\3\2\2\2\u01a5\u01a8\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a6"+ + "\u01a7\3\2\2\2\u01a7\u01a9\3\2\2\2\u01a8\u01a6\3\2\2\2\u01a9\u01aa\7\4"+ + "\2\2\u01aa\u01ac\3\2\2\2\u01ab\u019d\3\2\2\2\u01ab\u019f\3\2\2\2\u01ac"+ + "+\3\2\2\2\u01ad\u01af\7+\2\2\u01ae\u01ad\3\2\2\2\u01ae\u01af\3\2\2\2\u01af"+ + "\u01b0\3\2\2\2\u01b0\u01b5\5j\66\2\u01b1\u01b3\7\f\2\2\u01b2\u01b1\3\2"+ + "\2\2\u01b2\u01b3\3\2\2\2\u01b3\u01b4\3\2\2\2\u01b4\u01b6\5f\64\2\u01b5"+ + "\u01b2\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01ca\3\2\2\2\u01b7\u01b8\7\3"+ + "\2\2\u01b8\u01b9\5\n\6\2\u01b9\u01be\7\4\2\2\u01ba\u01bc\7\f\2\2\u01bb"+ + "\u01ba\3\2\2\2\u01bb\u01bc\3\2\2\2\u01bc\u01bd\3\2\2\2\u01bd\u01bf\5f"+ + "\64\2\u01be\u01bb\3\2\2\2\u01be\u01bf\3\2\2\2\u01bf\u01ca\3\2\2\2\u01c0"+ + "\u01c1\7\3\2\2\u01c1\u01c2\5$\23\2\u01c2\u01c7\7\4\2\2\u01c3\u01c5\7\f"+ + "\2\2\u01c4\u01c3\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5\u01c6\3\2\2\2\u01c6"+ + "\u01c8\5f\64\2\u01c7\u01c4\3\2\2\2\u01c7\u01c8\3\2\2\2\u01c8\u01ca\3\2"+ + "\2\2\u01c9\u01ae\3\2\2\2\u01c9\u01b7\3\2\2\2\u01c9\u01c0\3\2\2\2\u01ca"+ + "-\3\2\2\2\u01cb\u01cc\7N\2\2\u01cc\u01cd\7\3\2\2\u01cd\u01ce\5\60\31\2"+ + "\u01ce\u01cf\7(\2\2\u01cf\u01d0\5f\64\2\u01d0\u01d1\7\63\2\2\u01d1\u01d2"+ + "\7\3\2\2\u01d2\u01d3\5\60\31\2\u01d3\u01d4\7\4\2\2\u01d4\u01d5\7\4\2\2"+ + "\u01d5/\3\2\2\2\u01d6\u01db\5\62\32\2\u01d7\u01d8\7\5\2\2\u01d8\u01da"+ + "\5\62\32\2\u01d9\u01d7\3\2\2\2\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2"+ + "\u01db\u01dc\3\2\2\2\u01dc\61\3\2\2\2\u01dd\u01db\3\2\2\2\u01de\u01e3"+ + "\5D#\2\u01df\u01e1\7\f\2\2\u01e0\u01df\3\2\2\2\u01e0\u01e1\3\2\2\2\u01e1"+ + "\u01e2\3\2\2\2\u01e2\u01e4\5h\65\2\u01e3\u01e0\3\2\2\2\u01e3\u01e4\3\2"+ + "\2\2\u01e4\63\3\2\2\2\u01e5\u01e6\5\66\34\2\u01e6\65\3\2\2\2\u01e7\u01e8"+ + "\b\34\1\2\u01e8\u01e9\7D\2\2\u01e9\u0207\5\66\34\n\u01ea\u01eb\7#\2\2"+ + "\u01eb\u01ec\7\3\2\2\u01ec\u01ed\5\b\5\2\u01ed\u01ee\7\4\2\2\u01ee\u0207"+ + "\3\2\2\2\u01ef\u01f0\7R\2\2\u01f0\u01f1\7\3\2\2\u01f1\u01f2\5r:\2\u01f2"+ + "\u01f3\58\35\2\u01f3\u01f4\7\4\2\2\u01f4\u0207\3\2\2\2\u01f5\u01f6\7>"+ + "\2\2\u01f6\u01f7\7\3\2\2\u01f7\u01f8\5f\64\2\u01f8\u01f9\7\5\2\2\u01f9"+ + "\u01fa\5r:\2\u01fa\u01fb\58\35\2\u01fb\u01fc\7\4\2\2\u01fc\u0207\3\2\2"+ + "\2\u01fd\u01fe\7>\2\2\u01fe\u01ff\7\3\2\2\u01ff\u0200\5r:\2\u0200\u0201"+ + "\7\5\2\2\u0201\u0202\5r:\2\u0202\u0203\58\35\2\u0203\u0204\7\4\2\2\u0204"+ + "\u0207\3\2\2\2\u0205\u0207\5:\36\2\u0206\u01e7\3\2\2\2\u0206\u01ea\3\2"+ + "\2\2\u0206\u01ef\3\2\2\2\u0206\u01f5\3\2\2\2\u0206\u01fd\3\2\2\2\u0206"+ + "\u0205\3\2\2\2\u0207\u0210\3\2\2\2\u0208\u0209\f\4\2\2\u0209\u020a\7\n"+ + "\2\2\u020a\u020f\5\66\34\5\u020b\u020c\f\3\2\2\u020c\u020d\7I\2\2\u020d"+ + "\u020f\5\66\34\4\u020e\u0208\3\2\2\2\u020e\u020b\3\2\2\2\u020f\u0212\3"+ + "\2\2\2\u0210\u020e\3\2\2\2\u0210\u0211\3\2\2\2\u0211\67\3\2\2\2\u0212"+ + "\u0210\3\2\2\2\u0213\u0214\7\5\2\2\u0214\u0216\5r:\2\u0215\u0213\3\2\2"+ + 
"\2\u0216\u0219\3\2\2\2\u0217\u0215\3\2\2\2\u0217\u0218\3\2\2\2\u02189"+ + "\3\2\2\2\u0219\u0217\3\2\2\2\u021a\u021c\5D#\2\u021b\u021d\5<\37\2\u021c"+ + "\u021b\3\2\2\2\u021c\u021d\3\2\2\2\u021d;\3\2\2\2\u021e\u0220\7D\2\2\u021f"+ + "\u021e\3\2\2\2\u021f\u0220\3\2\2\2\u0220\u0221\3\2\2\2\u0221\u0222\7\16"+ + "\2\2\u0222\u0223\5D#\2\u0223\u0224\7\n\2\2\u0224\u0225\5D#\2\u0225\u024d"+ + "\3\2\2\2\u0226\u0228\7D\2\2\u0227\u0226\3\2\2\2\u0227\u0228\3\2\2\2\u0228"+ + "\u0229\3\2\2\2\u0229\u022a\7\63\2\2\u022a\u022b\7\3\2\2\u022b\u0230\5"+ + "D#\2\u022c\u022d\7\5\2\2\u022d\u022f\5D#\2\u022e\u022c\3\2\2\2\u022f\u0232"+ + "\3\2\2\2\u0230\u022e\3\2\2\2\u0230\u0231\3\2\2\2\u0231\u0233\3\2\2\2\u0232"+ + "\u0230\3\2\2\2\u0233\u0234\7\4\2\2\u0234\u024d\3\2\2\2\u0235\u0237\7D"+ + "\2\2\u0236\u0235\3\2\2\2\u0236\u0237\3\2\2\2\u0237\u0238\3\2\2\2\u0238"+ + "\u0239\7\63\2\2\u0239\u023a\7\3\2\2\u023a\u023b\5\b\5\2\u023b\u023c\7"+ + "\4\2\2\u023c\u024d\3\2\2\2\u023d\u023f\7D\2\2\u023e\u023d\3\2\2\2\u023e"+ + "\u023f\3\2\2\2\u023f\u0240\3\2\2\2\u0240\u0241\7;\2\2\u0241\u024d\5@!"+ + "\2\u0242\u0244\7D\2\2\u0243\u0242\3\2\2\2\u0243\u0244\3\2\2\2\u0244\u0245"+ + "\3\2\2\2\u0245\u0246\7Q\2\2\u0246\u024d\5r:\2\u0247\u0249\7\67\2\2\u0248"+ + "\u024a\7D\2\2\u0249\u0248\3\2\2\2\u0249\u024a\3\2\2\2\u024a\u024b\3\2"+ + "\2\2\u024b\u024d\7E\2\2\u024c\u021f\3\2\2\2\u024c\u0227\3\2\2\2\u024c"+ + "\u0236\3\2\2\2\u024c\u023e\3\2\2\2\u024c\u0243\3\2\2\2\u024c\u0247\3\2"+ + "\2\2\u024d=\3\2\2\2\u024e\u024f\7;\2\2\u024f\u0250\5@!\2\u0250?\3\2\2"+ + "\2\u0251\u0253\5r:\2\u0252\u0254\5B\"\2\u0253\u0252\3\2\2\2\u0253\u0254"+ + "\3\2\2\2\u0254A\3\2\2\2\u0255\u0256\7!\2\2\u0256\u025c\5r:\2\u0257\u0258"+ + "\7h\2\2\u0258\u0259\5r:\2\u0259\u025a\7o\2\2\u025a\u025c\3\2\2\2\u025b"+ + "\u0255\3\2\2\2\u025b\u0257\3\2\2\2\u025cC\3\2\2\2\u025d\u025e\b#\1\2\u025e"+ + "\u0262\5F$\2\u025f\u0260\t\7\2\2\u0260\u0262\5D#\6\u0261\u025d\3\2\2\2"+ + "\u0261\u025f\3\2\2\2\u0262\u026f\3\2\2\2\u0263\u0264\f\5\2\2\u0264\u0265"+ + "\t\f\2\2\u0265\u026e\5D#\6\u0266\u0267\f\4\2\2\u0267\u0268\t\7\2\2\u0268"+ + "\u026e\5D#\5\u0269\u026a\f\3\2\2\u026a\u026b\5\\/\2\u026b\u026c\5D#\4"+ + "\u026c\u026e\3\2\2\2\u026d\u0263\3\2\2\2\u026d\u0266\3\2\2\2\u026d\u0269"+ + "\3\2\2\2\u026e\u0271\3\2\2\2\u026f\u026d\3\2\2\2\u026f\u0270\3\2\2\2\u0270"+ + "E\3\2\2\2\u0271\u026f\3\2\2\2\u0272\u0273\b$\1\2\u0273\u0297\5J&\2\u0274"+ + "\u0297\5P)\2\u0275\u0297\5H%\2\u0276\u0297\5Z.\2\u0277\u0278\5f\64\2\u0278"+ + "\u0279\7~\2\2\u0279\u027b\3\2\2\2\u027a\u0277\3\2\2\2\u027a\u027b\3\2"+ + "\2\2\u027b\u027c\3\2\2\2\u027c\u0297\7y\2\2\u027d\u0297\5T+\2\u027e\u027f"+ + "\7\3\2\2\u027f\u0280\5\b\5\2\u0280\u0281\7\4\2\2\u0281\u0297\3\2\2\2\u0282"+ + "\u0297\5f\64\2\u0283\u0284\7\3\2\2\u0284\u0285\5\64\33\2\u0285\u0286\7"+ + "\4\2\2\u0286\u0297\3\2\2\2\u0287\u0289\7\20\2\2\u0288\u028a\5\66\34\2"+ + "\u0289\u0288\3\2\2\2\u0289\u028a\3\2\2\2\u028a\u028c\3\2\2\2\u028b\u028d"+ + "\5t;\2\u028c\u028b\3\2\2\2\u028d\u028e\3\2\2\2\u028e\u028c\3\2\2\2\u028e"+ + "\u028f\3\2\2\2\u028f\u0292\3\2\2\2\u0290\u0291\7\37\2\2\u0291\u0293\5"+ + "\66\34\2\u0292\u0290\3\2\2\2\u0292\u0293\3\2\2\2\u0293\u0294\3\2\2\2\u0294"+ + "\u0295\7 \2\2\u0295\u0297\3\2\2\2\u0296\u0272\3\2\2\2\u0296\u0274\3\2"+ + "\2\2\u0296\u0275\3\2\2\2\u0296\u0276\3\2\2\2\u0296\u027a\3\2\2\2\u0296"+ + "\u027d\3\2\2\2\u0296\u027e\3\2\2\2\u0296\u0282\3\2\2\2\u0296\u0283\3\2"+ + "\2\2\u0296\u0287\3\2\2\2\u0297\u029d\3\2\2\2\u0298\u0299\f\f\2\2\u0299"+ + "\u029a\7|\2\2\u029a\u029c\5d\63\2\u029b\u0298\3\2\2\2\u029c\u029f\3\2"+ + 
"\2\2\u029d\u029b\3\2\2\2\u029d\u029e\3\2\2\2\u029eG\3\2\2\2\u029f\u029d"+ + "\3\2\2\2\u02a0\u02a4\7\30\2\2\u02a1\u02a4\7\26\2\2\u02a2\u02a4\7\27\2"+ + "\2\u02a3\u02a0\3\2\2\2\u02a3\u02a1\3\2\2\2\u02a3\u02a2\3\2\2\2\u02a4I"+ + "\3\2\2\2\u02a5\u02b0\5L\'\2\u02a6\u02a7\7i\2\2\u02a7\u02a8\5L\'\2\u02a8"+ + "\u02a9\7o\2\2\u02a9\u02b0\3\2\2\2\u02aa\u02b0\5N(\2\u02ab\u02ac\7i\2\2"+ + "\u02ac\u02ad\5N(\2\u02ad\u02ae\7o\2\2\u02ae\u02b0\3\2\2\2\u02af\u02a5"+ + "\3\2\2\2\u02af\u02a6\3\2\2\2\u02af\u02aa\3\2\2\2\u02af\u02ab\3\2\2\2\u02b0"+ + "K\3\2\2\2\u02b1\u02b2\7\21\2\2\u02b2\u02b3\7\3\2\2\u02b3\u02b4\5\64\33"+ + "\2\u02b4\u02b5\7\f\2\2\u02b5\u02b6\5d\63\2\u02b6\u02b7\7\4\2\2\u02b7M"+ + "\3\2\2\2\u02b8\u02b9\7\25\2\2\u02b9\u02ba\7\3\2\2\u02ba\u02bb\5\64\33"+ + "\2\u02bb\u02bc\7\5\2\2\u02bc\u02bd\5d\63\2\u02bd\u02be\7\4\2\2\u02beO"+ + "\3\2\2\2\u02bf\u02c5\5R*\2\u02c0\u02c1\7i\2\2\u02c1\u02c2\5R*\2\u02c2"+ + "\u02c3\7o\2\2\u02c3\u02c5\3\2\2\2\u02c4\u02bf\3\2\2\2\u02c4\u02c0\3\2"+ + "\2\2\u02c5Q\3\2\2\2\u02c6\u02c7\7%\2\2\u02c7\u02c8\7\3\2\2\u02c8\u02c9"+ + "\5h\65\2\u02c9\u02ca\7*\2\2\u02ca\u02cb\5D#\2\u02cb\u02cc\7\4\2\2\u02cc"+ + "S\3\2\2\2\u02cd\u02d3\5V,\2\u02ce\u02cf\7i\2\2\u02cf\u02d0\5V,\2\u02d0"+ + "\u02d1\7o\2\2\u02d1\u02d3\3\2\2\2\u02d2\u02cd\3\2\2\2\u02d2\u02ce\3\2"+ + "\2\2\u02d3U\3\2\2\2\u02d4\u02d5\5X-\2\u02d5\u02e1\7\3\2\2\u02d6\u02d8"+ + "\5\36\20\2\u02d7\u02d6\3\2\2\2\u02d7\u02d8\3\2\2\2\u02d8\u02d9\3\2\2\2"+ + "\u02d9\u02de\5\64\33\2\u02da\u02db\7\5\2\2\u02db\u02dd\5\64\33\2\u02dc"+ + "\u02da\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2\u02de\u02df\3\2"+ + "\2\2\u02df\u02e2\3\2\2\2\u02e0\u02de\3\2\2\2\u02e1\u02d7\3\2\2\2\u02e1"+ + "\u02e2\3\2\2\2\u02e2\u02e3\3\2\2\2\u02e3\u02e4\7\4\2\2\u02e4W\3\2\2\2"+ + "\u02e5\u02e9\7:\2\2\u02e6\u02e9\7P\2\2\u02e7\u02e9\5h\65\2\u02e8\u02e5"+ + "\3\2\2\2\u02e8\u02e6\3\2\2\2\u02e8\u02e7\3\2\2\2\u02e9Y\3\2\2\2\u02ea"+ + "\u0305\7E\2\2\u02eb\u0305\5`\61\2\u02ec\u0305\5p9\2\u02ed\u0305\5^\60"+ + "\2\u02ee\u02f0\7\u0080\2\2\u02ef\u02ee\3\2\2\2\u02f0\u02f1\3\2\2\2\u02f1"+ + "\u02ef\3\2\2\2\u02f1\u02f2\3\2\2\2\u02f2\u0305\3\2\2\2\u02f3\u0305\7\177"+ + "\2\2\u02f4\u02f5\7k\2\2\u02f5\u02f6\5r:\2\u02f6\u02f7\7o\2\2\u02f7\u0305"+ + "\3\2\2\2\u02f8\u02f9\7l\2\2\u02f9\u02fa\5r:\2\u02fa\u02fb\7o\2\2\u02fb"+ + "\u0305\3\2\2\2\u02fc\u02fd\7m\2\2\u02fd\u02fe\5r:\2\u02fe\u02ff\7o\2\2"+ + "\u02ff\u0305\3\2\2\2\u0300\u0301\7n\2\2\u0301\u0302\5r:\2\u0302\u0303"+ + "\7o\2\2\u0303\u0305\3\2\2\2\u0304\u02ea\3\2\2\2\u0304\u02eb\3\2\2\2\u0304"+ + "\u02ec\3\2\2\2\u0304\u02ed\3\2\2\2\u0304\u02ef\3\2\2\2\u0304\u02f3\3\2"+ + "\2\2\u0304\u02f4\3\2\2\2\u0304\u02f8\3\2\2\2\u0304\u02fc\3\2\2\2\u0304"+ + "\u0300\3\2\2\2\u0305[\3\2\2\2\u0306\u0307\t\r\2\2\u0307]\3\2\2\2\u0308"+ + "\u0309\t\16\2\2\u0309_\3\2\2\2\u030a\u030c\7\66\2\2\u030b\u030d\t\7\2"+ + "\2\u030c\u030b\3\2\2\2\u030c\u030d\3\2\2\2\u030d\u0310\3\2\2\2\u030e\u0311"+ + "\5p9\2\u030f\u0311\5r:\2\u0310\u030e\3\2\2\2\u0310\u030f\3\2\2\2\u0311"+ + "\u0312\3\2\2\2\u0312\u0315\5b\62\2\u0313\u0314\7^\2\2\u0314\u0316\5b\62"+ + "\2\u0315\u0313\3\2\2\2\u0315\u0316\3\2\2\2\u0316a\3\2\2\2\u0317\u0318"+ + "\t\17\2\2\u0318c\3\2\2\2\u0319\u031a\5h\65\2\u031ae\3\2\2\2\u031b\u031c"+ + "\5h\65\2\u031c\u031d\7~\2\2\u031d\u031f\3\2\2\2\u031e\u031b\3\2\2\2\u031f"+ + "\u0322\3\2\2\2\u0320\u031e\3\2\2\2\u0320\u0321\3\2\2\2\u0321\u0323\3\2"+ + "\2\2\u0322\u0320\3\2\2\2\u0323\u0324\5h\65\2\u0324g\3\2\2\2\u0325\u0328"+ + "\5l\67\2\u0326\u0328\5n8\2\u0327\u0325\3\2\2\2\u0327\u0326\3\2\2\2\u0328"+ + 
"i\3\2\2\2\u0329\u032a\5h\65\2\u032a\u032b\7\6\2\2\u032b\u032d\3\2\2\2"+ + "\u032c\u0329\3\2\2\2\u032c\u032d\3\2\2\2\u032d\u032e\3\2\2\2\u032e\u0336"+ + "\7\u0085\2\2\u032f\u0330\5h\65\2\u0330\u0331\7\6\2\2\u0331\u0333\3\2\2"+ + "\2\u0332\u032f\3\2\2\2\u0332\u0333\3\2\2\2\u0333\u0334\3\2\2\2\u0334\u0336"+ + "\5h\65\2\u0335\u032c\3\2\2\2\u0335\u0332\3\2\2\2\u0336k\3\2\2\2\u0337"+ + "\u033a\7\u0086\2\2\u0338\u033a\7\u0087\2\2\u0339\u0337\3\2\2\2\u0339\u0338"+ + "\3\2\2\2\u033am\3\2\2\2\u033b\u033f\7\u0083\2\2\u033c\u033f\5v<\2\u033d"+ + "\u033f\7\u0084\2\2\u033e\u033b\3\2\2\2\u033e\u033c\3\2\2\2\u033e\u033d"+ + "\3\2\2\2\u033fo\3\2\2\2\u0340\u0343\7\u0082\2\2\u0341\u0343\7\u0081\2"+ + "\2\u0342\u0340\3\2\2\2\u0342\u0341\3\2\2\2\u0343q\3\2\2\2\u0344\u0345"+ + "\t\20\2\2\u0345s\3\2\2\2\u0346\u0347\7c\2\2\u0347\u0348\5\64\33\2\u0348"+ + "\u0349\7\\\2\2\u0349\u034a\5\64\33\2\u034au\3\2\2\2\u034b\u034c\t\21\2"+ + "\2\u034cw\3\2\2\2s\u0087\u0089\u008d\u0096\u0098\u009c\u00a3\u00a7\u00ad"+ + "\u00b2\u00b7\u00bb\u00c0\u00c8\u00cc\u00d4\u00d7\u00dd\u00e2\u00e5\u00ea"+ + "\u00ed\u00ef\u00f7\u00fa\u0106\u0109\u010c\u0113\u011a\u011e\u0122\u0126"+ + "\u012a\u012e\u0133\u0137\u013f\u0143\u0146\u014d\u0158\u015b\u015f\u016e"+ + "\u0173\u0176\u017c\u0183\u018a\u018d\u0191\u0195\u0199\u019b\u01a6\u01ab"+ + "\u01ae\u01b2\u01b5\u01bb\u01be\u01c4\u01c7\u01c9\u01db\u01e0\u01e3\u0206"+ + "\u020e\u0210\u0217\u021c\u021f\u0227\u0230\u0236\u023e\u0243\u0249\u024c"+ + "\u0253\u025b\u0261\u026d\u026f\u027a\u0289\u028e\u0292\u0296\u029d\u02a3"+ + "\u02af\u02c4\u02d2\u02d7\u02de\u02e1\u02e8\u02f1\u0304\u030c\u0310\u0315"+ + "\u0320\u0327\u032c\u0332\u0335\u0339\u033e\u0342"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index 7f44a1593c2e..bc8d06c1dcc3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -173,6 +173,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitSetQuantifier(SqlBaseParser.SetQuantifierContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#selectItems}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSelectItems(SqlBaseParser.SelectItemsContext ctx); /** * Visit a parse tree produced by the {@code selectExpression} * labeled alternative in {@link SqlBaseParser#selectItem}. @@ -225,6 +231,24 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitAliasedRelation(SqlBaseParser.AliasedRelationContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#pivotClause}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPivotClause(SqlBaseParser.PivotClauseContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#pivotArgs}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPivotArgs(SqlBaseParser.PivotArgsContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#namedValueExpression}. 
+   * @param ctx the parse tree
+   * @return the visitor result
+   */
+  T visitNamedValueExpression(SqlBaseParser.NamedValueExpressionContext ctx);
   /**
    * Visit a parse tree produced by {@link SqlBaseParser#expression}.
    * @param ctx the parse tree
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java
index 35d93e3a68c8..39fef8188b27 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Aggregate.java
@@ -10,8 +10,8 @@ import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Expressions;
 import org.elasticsearch.xpack.sql.expression.NamedExpression;
-import org.elasticsearch.xpack.sql.tree.Source;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.tree.Source;
 
 import java.util.List;
 import java.util.Objects;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java
new file mode 100644
index 000000000000..4a0639d8b78b
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/Pivot.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.plan.logical;
+
+import org.elasticsearch.xpack.sql.capabilities.Resolvables;
+import org.elasticsearch.xpack.sql.expression.Attribute;
+import org.elasticsearch.xpack.sql.expression.AttributeSet;
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.ExpressionId;
+import org.elasticsearch.xpack.sql.expression.Expressions;
+import org.elasticsearch.xpack.sql.expression.NamedExpression;
+import org.elasticsearch.xpack.sql.expression.function.Function;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.tree.Source;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+import static java.util.Collections.singletonList;
+
+public class Pivot extends UnaryPlan {
+
+    private final Expression column;
+    private final List<NamedExpression> values;
+    private final List<NamedExpression> aggregates;
+    // derived properties
+    private AttributeSet groupingSet;
+    private AttributeSet valueOutput;
+    private List<Attribute> output;
+
+    public Pivot(Source source, LogicalPlan child, Expression column, List<NamedExpression> values, List<NamedExpression> aggregates) {
+        super(source, child);
+        this.column = column;
+        this.values = values;
+        this.aggregates = aggregates;
+    }
+
+    @Override
+    protected NodeInfo<Pivot> info() {
+        return NodeInfo.create(this, Pivot::new, child(), column, values, aggregates);
+    }
+
+    @Override
+    protected Pivot replaceChild(LogicalPlan newChild) {
+        return new Pivot(source(), newChild, column, values, aggregates);
+    }
+
+    public Expression column() {
+        return column;
+    }
+
+    public List<NamedExpression> values() {
+        return values;
+    }
+
+    public List<NamedExpression> aggregates() {
+        return aggregates;
+    }
+
+    public AttributeSet groupingSet() {
+        if (groupingSet == null) {
+            AttributeSet columnSet = Expressions.references(singletonList(column));
+            // grouping can happen only on "primitive" fields, thus exclude multi-fields or nested docs
+            // the verifier enforces this rule so it does not catch folks by surprise
+            groupingSet = new AttributeSet(Expressions.onlyPrimitiveFieldAttributes(child().output()))
+                    // make sure to have the column as the last entry (helps with translation)
+                    .subtract(columnSet)
+                    .subtract(Expressions.references(aggregates))
+                    .combine(columnSet);
+        }
+        return groupingSet;
+    }
+
+    public AttributeSet valuesOutput() {
+        // TODO: the generated id is a hack since it can clash with other potentially generated ids
+        if (valueOutput == null) {
+            List<Attribute> out = new ArrayList<>(aggregates.size() * values.size());
+            if (aggregates.size() == 1) {
+                NamedExpression agg = aggregates.get(0);
+                for (NamedExpression value : values) {
+                    ExpressionId id = new ExpressionId(agg.id().hashCode() + value.id().hashCode());
+                    out.add(value.toAttribute().withDataType(agg.dataType()).withId(id));
+                }
+            }
+            // for multiple args, concat the function and the value
+            else {
+                for (NamedExpression agg : aggregates) {
+                    String name = agg instanceof Function ? ((Function) agg).functionName() : agg.name();
+                    for (NamedExpression value : values) {
+                        ExpressionId id = new ExpressionId(agg.id().hashCode() + value.id().hashCode());
+                        out.add(value.toAttribute().withName(value.name() + "_" + name).withDataType(agg.dataType()).withId(id));
+                    }
+                }
+            }
+            valueOutput = new AttributeSet(out);
+        }
+        return valueOutput;
+    }
+
+    @Override
+    public List<Attribute> output() {
+        if (output == null) {
+            output = new ArrayList<>(groupingSet()
+                    .subtract(Expressions.references(singletonList(column)))
+                    .combine(valuesOutput()));
+        }
+
+        return output;
+    }
+
+    @Override
+    public boolean expressionsResolved() {
+        return column.resolved() && Resolvables.resolved(values) && Resolvables.resolved(aggregates);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(column, values, aggregates, child());
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        Pivot other = (Pivot) obj;
+        return Objects.equals(column, other.column)
+                && Objects.equals(values, other.values)
+                && Objects.equals(aggregates, other.aggregates)
+                && Objects.equals(child(), other.child());
+    }
+}
\ No newline at end of file
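The naming rule in `valuesOutput()` above is easier to see in isolation. The sketch below is hypothetical (the `PivotNaming` class and plain strings stand in for the real `NamedExpression`/`Attribute` machinery) but mirrors the branch logic in the diff: with a single aggregate the pivoted columns simply keep the value names, while with several aggregates the function name is appended to each value.

    import java.util.ArrayList;
    import java.util.List;

    public class PivotNaming {
        // single aggregate: pivoted columns keep the value names;
        // several aggregates: the function name is appended to each value
        static List<String> pivotColumnNames(List<String> aggregates, List<String> values) {
            List<String> out = new ArrayList<>(aggregates.size() * values.size());
            if (aggregates.size() == 1) {
                out.addAll(values);
            } else {
                for (String agg : aggregates) {
                    for (String value : values) {
                        out.add(value + "_" + agg);
                    }
                }
            }
            return out;
        }

        public static void main(String[] args) {
            // SELECT * FROM emp PIVOT (SUM(salary) FOR languages IN (1, 2))
            System.out.println(pivotColumnNames(List.of("SUM"), List.of("1", "2")));
            // prints [1, 2]
            // SELECT * FROM emp PIVOT (SUM(salary), AVG(salary) FOR languages IN (1, 2))
            System.out.println(pivotColumnNames(List.of("SUM", "AVG"), List.of("1", "2")));
            // prints [1_SUM, 2_SUM, 1_AVG, 2_AVG]
        }
    }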
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PivotExec.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PivotExec.java
new file mode 100644
index 000000000000..579a53696eec
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PivotExec.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.plan.physical;
+
+import org.elasticsearch.xpack.sql.expression.Attribute;
+import org.elasticsearch.xpack.sql.plan.logical.Pivot;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.tree.Source;
+
+import java.util.List;
+import java.util.Objects;
+
+public class PivotExec extends UnaryExec implements Unexecutable {
+
+    private final Pivot pivot;
+
+    public PivotExec(Source source, PhysicalPlan child, Pivot pivot) {
+        super(source, child);
+        this.pivot = pivot;
+    }
+
+    @Override
+    protected NodeInfo<PivotExec> info() {
+        return NodeInfo.create(this, PivotExec::new, child(), pivot);
+    }
+
+    @Override
+    protected PivotExec replaceChild(PhysicalPlan newChild) {
+        return new PivotExec(source(), newChild, pivot);
+    }
+
+    @Override
+    public List<Attribute> output() {
+        return pivot.output();
+    }
+
+    public Pivot pivot() {
+        return pivot;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(pivot, child());
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        PivotExec other = (PivotExec) obj;
+
+        return Objects.equals(pivot, other.pivot)
+            && Objects.equals(child(), other.child());
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java
index b32ad961ae95..522d5a944dc5 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java
@@ -14,6 +14,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Limit;
 import org.elasticsearch.xpack.sql.plan.logical.LocalRelation;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.plan.logical.OrderBy;
+import org.elasticsearch.xpack.sql.plan.logical.Pivot;
 import org.elasticsearch.xpack.sql.plan.logical.Project;
 import org.elasticsearch.xpack.sql.plan.logical.With;
 import org.elasticsearch.xpack.sql.plan.logical.command.Command;
@@ -25,6 +26,7 @@ import org.elasticsearch.xpack.sql.plan.physical.LimitExec;
 import org.elasticsearch.xpack.sql.plan.physical.LocalExec;
 import org.elasticsearch.xpack.sql.plan.physical.OrderExec;
 import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
+import org.elasticsearch.xpack.sql.plan.physical.PivotExec;
 import org.elasticsearch.xpack.sql.plan.physical.ProjectExec;
 import org.elasticsearch.xpack.sql.plan.physical.UnplannedExec;
 import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
@@ -88,6 +90,11 @@ class Mapper extends RuleExecutor<PhysicalPlan> {
             return new AggregateExec(p.source(), map(a.child()), a.groupings(), a.aggregates());
         }
 
+        if (p instanceof Pivot) {
+            Pivot pv = (Pivot) p;
+            return new PivotExec(pv.source(), map(pv.child()), pv);
+        }
+
         if (p instanceof EsRelation) {
             EsRelation c = (EsRelation) p;
             List<Attribute> output = c.output();
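To make the QueryFolder diff that follows easier to read: the new FoldPivot rule (towards the end of that file) first reuses the aggregate fold over the full grouping set plus the aggregates, then rewires the extracted columns so that each aggregate column fans out into one column per pivoted value. A minimal sketch of that list surgery, assuming plain strings in place of the real `Tuple<FieldExtraction, ExpressionId>` pairs and a hypothetical `PivotFieldSurgery` class:

    import java.util.ArrayList;
    import java.util.List;

    public class PivotFieldSurgery {
        public static void main(String[] args) {
            // extracted fields after the aggregate fold: grouping keys first,
            // with the pivot column last among them, then the aggregates
            List<String> fields = new ArrayList<>(List.of("gender", "languages", "SUM(salary)"));
            List<String> values = List.of("1", "2");
            int aggregates = 1;

            // same index arithmetic as FoldPivot: the pivot column sits just
            // before the aggregate columns
            int startingIndex = fields.size() - aggregates - 1;
            String pivotColumn = fields.remove(startingIndex); // "languages"

            for (int i = startingIndex; i < fields.size(); i++) {
                String agg = fields.remove(i);
                for (String v : values) {
                    // stands in for new PivotColumnRef(group, agg, value)
                    fields.add(agg + " FOR " + pivotColumn + " = " + v);
                }
                i += values.size();
            }

            System.out.println(fields);
            // [gender, SUM(salary) FOR languages = 1, SUM(salary) FOR languages = 2]
        }
    }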
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
index ae875d6fc6e6..3931ada38366 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
@@ -8,10 +8,13 @@ package org.elasticsearch.xpack.sql.planner;
 
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.execution.search.AggRef;
+import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
 import org.elasticsearch.xpack.sql.expression.Alias;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.AttributeMap;
+import org.elasticsearch.xpack.sql.expression.AttributeSet;
 import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.ExpressionId;
 import org.elasticsearch.xpack.sql.expression.Expressions;
 import org.elasticsearch.xpack.sql.expression.Foldables;
 import org.elasticsearch.xpack.sql.expression.NamedExpression;
@@ -32,6 +35,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggPathInput;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe;
 import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
+import org.elasticsearch.xpack.sql.plan.logical.Pivot;
 import org.elasticsearch.xpack.sql.plan.physical.AggregateExec;
 import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec;
 import org.elasticsearch.xpack.sql.plan.physical.FilterExec;
@@ -39,6 +43,7 @@ import org.elasticsearch.xpack.sql.plan.physical.LimitExec;
 import org.elasticsearch.xpack.sql.plan.physical.LocalExec;
 import org.elasticsearch.xpack.sql.plan.physical.OrderExec;
 import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
+import org.elasticsearch.xpack.sql.plan.physical.PivotExec;
 import org.elasticsearch.xpack.sql.plan.physical.ProjectExec;
 import org.elasticsearch.xpack.sql.planner.QueryTranslator.GroupingContext;
 import org.elasticsearch.xpack.sql.planner.QueryTranslator.QueryTranslation;
@@ -52,6 +57,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.GlobalCountRef;
 import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef;
 import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef.Property;
 import org.elasticsearch.xpack.sql.querydsl.container.MetricAggRef;
+import org.elasticsearch.xpack.sql.querydsl.container.PivotColumnRef;
 import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
 import org.elasticsearch.xpack.sql.querydsl.container.ScoreSort;
 import org.elasticsearch.xpack.sql.querydsl.container.ScriptSort;
@@ -64,14 +70,17 @@ import org.elasticsearch.xpack.sql.rule.RuleExecutor;
 import org.elasticsearch.xpack.sql.session.EmptyExecutable;
 import org.elasticsearch.xpack.sql.util.Check;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedHashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.xpack.sql.planner.QueryTranslator.and;
 import static org.elasticsearch.xpack.sql.planner.QueryTranslator.toAgg;
 import static org.elasticsearch.xpack.sql.planner.QueryTranslator.toQuery;
+import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine;
 
 /**
  * Folds the PhysicalPlan into a {@link Query}.
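For intuition about what the folded plan computes: a pivot is equivalent to grouping by the remaining keys plus the pivoting column and then spreading each pivot value into its own column. In the folded plan the grouping runs as an aggregation on the Elasticsearch side, while the spreading happens when the results are read back through the pivot extractors. A self-contained, hypothetical illustration of that two-step transform (not the plugin's code; Java 16+ for records):

    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class PivotByHand {
        record Row(String gender, int languages, long salary) {}

        public static void main(String[] args) {
            // SELECT * FROM emp PIVOT (SUM(salary) FOR languages IN (1, 2))
            List<Row> rows = List.of(
                    new Row("F", 1, 50_000), new Row("F", 2, 60_000),
                    new Row("M", 1, 40_000), new Row("M", 2, 70_000), new Row("M", 2, 30_000));

            // step 1: group by the non-pivoted key (gender) plus the pivot column (languages)
            Map<String, Map<Integer, Long>> pivoted = new TreeMap<>();
            for (Row r : rows) {
                pivoted.computeIfAbsent(r.gender(), g -> new TreeMap<>())
                        .merge(r.languages(), r.salary(), Long::sum);
            }

            // step 2: spread each pivot value into its own column
            System.out.println("gender | 1 | 2");
            pivoted.forEach((g, byLang) ->
                    System.out.println(g + " | " + byLang.get(1) + " | " + byLang.get(2)));
        }
    }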
@@ -85,6 +94,7 @@ class QueryFolder extends RuleExecutor { @Override protected Iterable.Batch> batches() { Batch rollup = new Batch("Fold queries", + new FoldPivot(), new FoldAggregate(), new FoldProject(), new FoldFilter(), @@ -149,7 +159,8 @@ class QueryFolder extends RuleExecutor { queryC.sort(), queryC.limit(), queryC.shouldTrackHits(), - queryC.shouldIncludeFrozen()); + queryC.shouldIncludeFrozen(), + queryC.minPageSize()); return new EsQueryExec(exec.source(), exec.index(), project.output(), clone); } return project; @@ -179,7 +190,8 @@ class QueryFolder extends RuleExecutor { qContainer.sort(), qContainer.limit(), qContainer.shouldTrackHits(), - qContainer.shouldIncludeFrozen()); + qContainer.shouldIncludeFrozen(), + qContainer.minPageSize()); return exec.with(qContainer); } @@ -204,190 +216,190 @@ class QueryFolder extends RuleExecutor { private static class FoldAggregate extends FoldingRule { @Override protected PhysicalPlan rule(AggregateExec a) { - if (a.child() instanceof EsQueryExec) { EsQueryExec exec = (EsQueryExec) a.child(); + return fold(a, exec); + } + return a; + } + + static EsQueryExec fold(AggregateExec a, EsQueryExec exec) { + // build the group aggregation + // and also collect info about it (since the group columns might be used inside the select) - // build the group aggregation - // and also collect info about it (since the group columns might be used inside the select) + GroupingContext groupingContext = QueryTranslator.groupBy(a.groupings()); - GroupingContext groupingContext = QueryTranslator.groupBy(a.groupings()); + QueryContainer queryC = exec.queryContainer(); + if (groupingContext != null) { + queryC = queryC.addGroups(groupingContext.groupMap.values()); + } - QueryContainer queryC = exec.queryContainer(); - if (groupingContext != null) { - queryC = queryC.addGroups(groupingContext.groupMap.values()); - } + Map aliases = new LinkedHashMap<>(); + // tracker for compound aggs seen in a group + Map compoundAggMap = new LinkedHashMap<>(); - Map aliases = new LinkedHashMap<>(); - // tracker for compound aggs seen in a group - Map compoundAggMap = new LinkedHashMap<>(); + // followed by actual aggregates + for (NamedExpression ne : a.aggregates()) { - // followed by actual aggregates - for (NamedExpression ne : a.aggregates()) { + // unwrap alias - it can be + // - an attribute (since we support aliases inside group-by) + // SELECT emp_no ... GROUP BY emp_no + // SELECT YEAR(hire_date) ... GROUP BY YEAR(hire_date) - // unwrap alias - it can be - // - an attribute (since we support aliases inside group-by) - // SELECT emp_no ... GROUP BY emp_no - // SELECT YEAR(hire_date) ... GROUP BY YEAR(hire_date) + // - an agg function (typically) + // SELECT COUNT(*), AVG(salary) ... GROUP BY salary; - // - an agg function (typically) - // SELECT COUNT(*), AVG(salary) ... GROUP BY salary; + // - a scalar function, which can be applied on an attribute or aggregate and can require one or multiple inputs - // - a scalar function, which can be applied on an attribute or aggregate and can require one or multiple inputs + // SELECT SIN(emp_no) ... GROUP BY emp_no + // SELECT CAST(YEAR(hire_date)) ... GROUP BY YEAR(hire_date) + // SELECT CAST(AVG(salary)) ... GROUP BY salary + // SELECT AVG(salary) + SIN(MIN(salary)) ... GROUP BY salary - // SELECT SIN(emp_no) ... GROUP BY emp_no - // SELECT CAST(YEAR(hire_date)) ... GROUP BY YEAR(hire_date) - // SELECT CAST(AVG(salary)) ... GROUP BY salary - // SELECT AVG(salary) + SIN(MIN(salary)) ... 
GROUP BY salary + if (ne instanceof Alias || ne instanceof Function) { + Alias as = ne instanceof Alias ? (Alias) ne : null; + Expression child = as != null ? as.child() : ne; - if (ne instanceof Alias || ne instanceof Function) { - Alias as = ne instanceof Alias ? (Alias) ne : null; - Expression child = as != null ? as.child() : ne; + // record aliases in case they are later referred in the tree + if (as != null && as.child() instanceof NamedExpression) { + aliases.put(as.toAttribute(), ((NamedExpression) as.child()).toAttribute()); + } - // record aliases in case they are later referred in the tree - if (as != null && as.child() instanceof NamedExpression) { - aliases.put(as.toAttribute(), ((NamedExpression) as.child()).toAttribute()); - } + // + // look first for scalar functions which might wrap the actual grouped target + // (e.g. + // CAST(field) GROUP BY field or + // ABS(YEAR(field)) GROUP BY YEAR(field) or + // ABS(AVG(salary)) ... GROUP BY salary + // ) + if (child instanceof ScalarFunction) { + ScalarFunction f = (ScalarFunction) child; + Pipe proc = f.asPipe(); - // - // look first for scalar functions which might wrap the actual grouped target - // (e.g. - // CAST(field) GROUP BY field or - // ABS(YEAR(field)) GROUP BY YEAR(field) or - // ABS(AVG(salary)) ... GROUP BY salary - // ) - if (child instanceof ScalarFunction) { - ScalarFunction f = (ScalarFunction) child; - Pipe proc = f.asPipe(); + final AtomicReference qC = new AtomicReference<>(queryC); - final AtomicReference qC = new AtomicReference<>(queryC); - - proc = proc.transformUp(p -> { - // bail out if the def is resolved - if (p.resolved()) { - return p; - } - - // get the backing expression and check if it belongs to a agg group or whether it's - // an expression in the first place - Expression exp = p.expression(); - GroupByKey matchingGroup = null; - if (groupingContext != null) { - // is there a group (aggregation) for this expression ? 
- matchingGroup = groupingContext.groupFor(exp); - } - else { - // a scalar function can be used only if has already been mentioned for grouping - // (otherwise it is the opposite of grouping) - if (exp instanceof ScalarFunction) { - throw new FoldingException(exp, "Scalar function " +exp.toString() - + " can be used only if included already in grouping"); - } - } - - // found match for expression; if it's an attribute or scalar, end the processing chain with - // the reference to the backing agg - if (matchingGroup != null) { - if (exp instanceof Attribute || exp instanceof ScalarFunction || exp instanceof GroupingFunction) { - Processor action = null; - boolean isDateBased = exp.dataType().isDateBased(); - /* - * special handling of dates since aggs return the typed Date object which needs - * extraction instead of handling this in the scroller, the folder handles this - * as it already got access to the extraction action - */ - if (exp instanceof DateTimeHistogramFunction) { - action = ((UnaryPipe) p).action(); - isDateBased = true; - } - return new AggPathInput(exp.source(), exp, - new GroupByRef(matchingGroup.id(), null, isDateBased), action); - } - } - // or found an aggregate expression (which has to work on an attribute used for grouping) - // (can happen when dealing with a root group) - if (Functions.isAggregate(exp)) { - Tuple withFunction = addAggFunction(matchingGroup, - (AggregateFunction) exp, compoundAggMap, qC.get()); - qC.set(withFunction.v1()); - return withFunction.v2(); - } - // not an aggregate and no matching - go to a higher node (likely a function YEAR(birth_date)) + proc = proc.transformUp(p -> { + // bail out if the def is resolved + if (p.resolved()) { return p; - }); - - if (!proc.resolved()) { - throw new FoldingException(child, "Cannot find grouping for '{}'", Expressions.name(child)); } - // add the computed column - queryC = qC.get().addColumn(new ComputedRef(proc), f.toAttribute()); - - // TODO: is this needed? - // redirect the alias to the scalar group id (changing the id altogether doesn't work it is - // already used in the aggpath) - //aliases.put(as.toAttribute(), sf.toAttribute()); - } - // apply the same logic above (for function inputs) to non-scalar functions with small variations: - // instead of adding things as input, add them as full blown column - else { + // get the backing expression and check if it belongs to a agg group or whether it's + // an expression in the first place + Expression exp = p.expression(); GroupByKey matchingGroup = null; if (groupingContext != null) { // is there a group (aggregation) for this expression ? 
- matchingGroup = groupingContext.groupFor(child); + matchingGroup = groupingContext.groupFor(exp); + } else { + // a scalar function can be used only if has already been mentioned for grouping + // (otherwise it is the opposite of grouping) + if (exp instanceof ScalarFunction) { + throw new FoldingException(exp, + "Scalar function " + exp.toString() + " can be used only if included already in grouping"); + } } - // attributes can only refer to declared groups - if (child instanceof Attribute) { - Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(child)); - queryC = queryC.addColumn( - new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()), ((Attribute) child)); + + // found match for expression; if it's an attribute or scalar, end the processing chain with + // the reference to the backing agg + if (matchingGroup != null) { + if (exp instanceof Attribute || exp instanceof ScalarFunction || exp instanceof GroupingFunction) { + Processor action = null; + boolean isDateBased = exp.dataType().isDateBased(); + /* + * special handling of dates since aggs return the typed Date object which needs + * extraction instead of handling this in the scroller, the folder handles this + * as it already got access to the extraction action + */ + if (exp instanceof DateTimeHistogramFunction) { + action = ((UnaryPipe) p).action(); + isDateBased = true; + } + return new AggPathInput(exp.source(), exp, new GroupByRef(matchingGroup.id(), null, isDateBased), + action); + } } - // handle histogram - else if (child instanceof GroupingFunction) { - queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()), - ((GroupingFunction) child).toAttribute()); + // or found an aggregate expression (which has to work on an attribute used for grouping) + // (can happen when dealing with a root group) + if (Functions.isAggregate(exp)) { + Tuple withFunction = addAggFunction(matchingGroup, (AggregateFunction) exp, + compoundAggMap, qC.get()); + qC.set(withFunction.v1()); + return withFunction.v2(); } + // not an aggregate and no matching - go to a higher node (likely a function YEAR(birth_date)) + return p; + }); + + if (!proc.resolved()) { + throw new FoldingException(child, "Cannot find grouping for '{}'", Expressions.name(child)); + } + + // add the computed column + queryC = qC.get().addColumn(new ComputedRef(proc), f.toAttribute()); + + // TODO: is this needed? + // redirect the alias to the scalar group id (changing the id altogether doesn't work it is + // already used in the aggpath) + //aliases.put(as.toAttribute(), sf.toAttribute()); + } + // apply the same logic above (for function inputs) to non-scalar functions with small variations: + // instead of adding things as input, add them as full blown column + else { + GroupByKey matchingGroup = null; + if (groupingContext != null) { + // is there a group (aggregation) for this expression ? 
+                    matchingGroup = groupingContext.groupFor(child);
+                }
+                // attributes can only refer to declared groups
+                if (child instanceof Attribute) {
+                    Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(child));
+                    queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()),
+                            ((Attribute) child));
+                }
+                // handle histogram
+                else if (child instanceof GroupingFunction) {
+                    queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()),
+                            ((GroupingFunction) child).toAttribute());
+                }
                 else if (child.foldable()) {
                     queryC = queryC.addColumn(ne.toAttribute());
                 }
-                // fallback to regular agg functions
-                else {
-                    // the only thing left is agg function
-                    Check.isTrue(Functions.isAggregate(child),
-                            "Expected aggregate function inside alias; got [{}]", child.nodeString());
-                    AggregateFunction af = (AggregateFunction) child;
-                    Tuple<QueryContainer, AggPathInput> withAgg = addAggFunction(matchingGroup, af, compoundAggMap, queryC);
-                    // make sure to add the inner id (to handle compound aggs)
-                    queryC = withAgg.v1().addColumn(withAgg.v2().context(), af.toAttribute());
-                }
+                // fallback to regular agg functions
+                else {
+                    // the only thing left is agg function
+                    Check.isTrue(Functions.isAggregate(child), "Expected aggregate function inside alias; got [{}]",
+                            child.nodeString());
+                    AggregateFunction af = (AggregateFunction) child;
+                    Tuple<QueryContainer, AggPathInput> withAgg = addAggFunction(matchingGroup, af, compoundAggMap, queryC);
+                    // make sure to add the inner id (to handle compound aggs)
+                    queryC = withAgg.v1().addColumn(withAgg.v2().context(), af.toAttribute());
                 }
+            }
             // not an Alias or Function means it's an Attribute so apply the same logic as above
-        } else {
-            GroupByKey matchingGroup = null;
-            if (groupingContext != null) {
-                matchingGroup = groupingContext.groupFor(ne);
-                Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne));
+            } else {
+                GroupByKey matchingGroup = null;
+                if (groupingContext != null) {
+                    matchingGroup = groupingContext.groupFor(ne);
+                    Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne));
-                queryC = queryC.addColumn(
-                        new GroupByRef(matchingGroup.id(), null, ne.dataType().isDateBased()), ne.toAttribute());
-            }
+                    queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, ne.dataType().isDateBased()), ne.toAttribute());
+                }
                 else if (ne.foldable()) {
                     queryC = queryC.addColumn(ne.toAttribute());
                 }
             }
         }
-        if (!aliases.isEmpty()) {
-            Map<Attribute, Attribute> newAliases = new LinkedHashMap<>(queryC.aliases());
-            newAliases.putAll(aliases);
-            queryC = queryC.withAliases(new AttributeMap<>(newAliases));
-        }
-        return new EsQueryExec(exec.source(), exec.index(), a.output(), queryC);
+        if (!aliases.isEmpty()) {
+            Map<Attribute, Attribute> newAliases = new LinkedHashMap<>(queryC.aliases());
+            newAliases.putAll(aliases);
+            queryC = queryC.withAliases(new AttributeMap<>(newAliases));
         }
-        return a;
+        return new EsQueryExec(exec.source(), exec.index(), a.output(), queryC);
     }

-    private Tuple<QueryContainer, AggPathInput> addAggFunction(GroupByKey groupingAgg, AggregateFunction f,
+    private static Tuple<QueryContainer, AggPathInput> addAggFunction(GroupByKey groupingAgg, AggregateFunction f,
             Map<CompoundNumericAggregate, String> compoundAggMap, QueryContainer queryC) {
         String functionId = f.functionId();
         // handle count as a special case agg
@@ -551,6 +563,52 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
         }
     }

+
+    private static class FoldPivot extends FoldingRule<PivotExec> {
+
+        @Override
+        protected PhysicalPlan rule(PivotExec plan) {
+            if (plan.child() instanceof EsQueryExec) {
+                EsQueryExec exec = (EsQueryExec) plan.child();
+                Pivot p = plan.pivot();
+
+                EsQueryExec fold = FoldAggregate
+                        .fold(new AggregateExec(plan.source(), exec,
+                                new ArrayList<>(p.groupingSet()), combine(p.groupingSet(), p.aggregates())), exec);
+
+                // replace the aggregate extractors with pivot specific extractors
+                // these require a reference to the pivoting column in order to compare the value
+                // due to the Pivot structure - the column is the last entry in the grouping set
+                QueryContainer query = fold.queryContainer();
+
+                List<Tuple<FieldExtraction, ExpressionId>> fields = new ArrayList<>(query.fields());
+                int startingIndex = fields.size() - p.aggregates().size() - 1;
+                // pivot grouping
+                Tuple<FieldExtraction, ExpressionId> groupTuple = fields.remove(startingIndex);
+                AttributeSet valuesOutput = plan.pivot().valuesOutput();
+
+                for (int i = startingIndex; i < fields.size(); i++) {
+                    Tuple<FieldExtraction, ExpressionId> tuple = fields.remove(i);
+                    for (Attribute attribute : valuesOutput) {
+                        fields.add(new Tuple<>(new PivotColumnRef(groupTuple.v1(), tuple.v1(), attribute.fold()), attribute.id()));
+                    }
+                    i += valuesOutput.size();
+                }
+
+                return fold.with(new QueryContainer(query.query(), query.aggs(),
+                        fields,
+                        query.aliases(),
+                        query.pseudoFunctions(),
+                        query.scalarFunctions(),
+                        query.sort(),
+                        query.limit(),
+                        query.shouldTrackHits(),
+                        query.shouldIncludeFrozen(),
+                        valuesOutput.size()));
+            }
+            return plan;
+        }
+    }
+
     //
     // local
     //
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
index 9a837268f080..d46b2a3da01b 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
@@ -679,7 +679,7 @@ final class QueryTranslator {
             // Since RangeQueryBuilder can handle date as String as well, we'll format it as String and provide the format as well.
             value = formatter.format((ZonedDateTime) value);
         } else {
-            formatter = DateFormatter.forPattern(TIME_FORMAT);    
+            formatter = DateFormatter.forPattern(TIME_FORMAT);
             value = formatter.format((OffsetTime) value);
         }
         format = formatter.pattern();
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java
index 1e527657ae0c..fe4ec05ab33a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Verifier.java
@@ -5,7 +5,9 @@
  */
 package org.elasticsearch.xpack.sql.planner;

+import org.elasticsearch.xpack.sql.expression.function.aggregate.InnerAggregate;
 import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
+import org.elasticsearch.xpack.sql.plan.physical.PivotExec;
 import org.elasticsearch.xpack.sql.plan.physical.Unexecutable;
 import org.elasticsearch.xpack.sql.plan.physical.UnplannedExec;
 import org.elasticsearch.xpack.sql.tree.Node;
@@ -14,6 +16,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;

+import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
+
 abstract class Verifier {

     static class Failure {
@@ -53,8 +57,8 @@ abstract class Verifier {
         }
     }

-    private static Failure fail(Node<?> source, String message) {
-        return new Failure(source, message);
+    private static Failure fail(Node<?> source, String message, Object... args) {
+        return new Failure(source, format(null, message, args));
     }
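`fail()` now takes varargs and runs them through `LoggerMessageFormat.format`, the same `{}` placeholder style Elasticsearch uses for log messages, so callers like the new pivot check can write `fail(e, "Aggregation [{}] not supported (yet) by PIVOT", e.sourceText())`. A rough, dependency-free illustration of that placeholder substitution (the real implementation lives in `org.elasticsearch.common.logging.LoggerMessageFormat` and additionally handles escaping):

```java
// Minimal stand-in for LoggerMessageFormat-style "{}" substitution; illustration only.
final class MessageFormatDemo {
    static String format(String pattern, Object... args) {
        StringBuilder out = new StringBuilder();
        int from = 0;
        for (Object arg : args) {
            int at = pattern.indexOf("{}", from);
            if (at < 0) {
                break; // more args than placeholders; extras are ignored here
            }
            out.append(pattern, from, at).append(arg);
            from = at + 2;
        }
        return out.append(pattern.substring(from)).toString();
    }

    public static void main(String[] args) {
        System.out.println(format("Aggregation [{}] not supported (yet) by PIVOT", "SUM_OF_SQUARES(int)"));
        // -> Aggregation [SUM_OF_SQUARES(int)] not supported (yet) by PIVOT
    }
}
```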
     static List<Failure> verifyMappingPlan(PhysicalPlan plan) {
@@ -70,10 +74,22 @@ abstract class Verifier {
                 }
             });
         });
+        // verify Pivot
+        checkInnerAggsPivot(plan, failures);

         return failures;
     }

+    private static void checkInnerAggsPivot(PhysicalPlan plan, List<Failure> failures) {
+        plan.forEachDown(p -> {
+            p.pivot().aggregates().forEach(agg -> agg.forEachDown(e -> {
+                if (e instanceof InnerAggregate) {
+                    failures.add(fail(e, "Aggregation [{}] not supported (yet) by PIVOT", e.sourceText()));
+                }
+            }));
+        }, PivotExec.class);
+    }
+
     static List<Failure> verifyExecutingPlan(PhysicalPlan plan) {
         List<Failure> failures = new ArrayList<>();
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/PivotColumnRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/PivotColumnRef.java
new file mode 100644
index 000000000000..60ee3b7409c0
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/PivotColumnRef.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.querydsl.container;
+
+import org.elasticsearch.xpack.sql.execution.search.AggRef;
+import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;
+
+public class PivotColumnRef extends AggRef {
+
+    private final FieldExtraction agg;
+    private final FieldExtraction pivot;
+    private final Object value;
+
+    public PivotColumnRef(FieldExtraction pivot, FieldExtraction agg, Object value) {
+        this.pivot = pivot;
+        this.agg = agg;
+        // due to the way Elasticsearch aggs work
+        // promote the object to expected types so that the comparison works
+        this.value = esAggType(value);
+    }
+
+    private static Object esAggType(Object value) {
+        if (value instanceof Number) {
+            Number n = (Number) value;
+            if (value instanceof Double) {
+                return value;
+            }
+            if (value instanceof Float) {
+                return Double.valueOf(n.doubleValue());
+            }
+            return Long.valueOf(n.longValue());
+        }
+        return value;
+    }
+
+    public FieldExtraction pivot() {
+        return pivot;
+    }
+
+    public FieldExtraction agg() {
+        return agg;
+    }
+
+    public Object value() {
+        return value;
+    }
+}
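`PivotColumnRef` normalizes the pivot value up front because Elasticsearch agg buckets hand numeric keys back as `long` or `double`; comparing a raw `Integer` or `Float` literal against a bucket key would fail on type rather than value. A standalone demo of the same promotion rule (this mirrors `esAggType` above, outside the real class; run with `-ea` to enable the asserts):

```java
// Mirrors PivotColumnRef#esAggType: integral types widen to Long, Float widens to Double.
final class EsAggTypeDemo {
    static Object esAggType(Object value) {
        if (value instanceof Number) {
            Number n = (Number) value;
            if (value instanceof Double) {
                return value;
            }
            if (value instanceof Float) {
                return Double.valueOf(n.doubleValue());
            }
            return Long.valueOf(n.longValue());
        }
        return value; // strings, booleans, etc. compare as-is
    }

    public static void main(String[] args) {
        // An int literal in "PIVOT(... FOR x IN (1))" must match the long 1 in the agg bucket.
        assert esAggType(1).equals(1L);
        assert esAggType(1.5f).equals(1.5d);
        assert esAggType("bla").equals("bla");
        System.out.println("promotions line up with ES agg bucket types");
    }
}
```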
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
index 5ff560f4baa4..c75a20082027 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
@@ -83,13 +83,15 @@ public class QueryContainer {
     private final int limit;
     private final boolean trackHits;
     private final boolean includeFrozen;
+    // used when pivoting for retrieving at least one pivot row
+    private final int minPageSize;

     // computed
     private Boolean aggsOnly;
     private Boolean customSort;

     public QueryContainer() {
-        this(null, null, null, null, null, null, null, -1, false, false);
+        this(null, null, null, null, null, null, null, -1, false, false, -1);
     }

     public QueryContainer(Query query,
@@ -102,7 +104,8 @@ public class QueryContainer {
             Set<Sort> sort,
             int limit,
             boolean trackHits,
-            boolean includeFrozen) {
+            boolean includeFrozen,
+            int minPageSize) {
         this.query = query;
         this.aggs = aggs == null ? Aggs.EMPTY : aggs;
         this.fields = fields == null || fields.isEmpty() ? emptyList() : fields;
@@ -113,6 +116,7 @@ public class QueryContainer {
         this.limit = limit;
         this.trackHits = trackHits;
         this.includeFrozen = includeFrozen;
+        this.minPageSize = minPageSize;
     }

     /**
@@ -247,49 +251,62 @@ public class QueryContainer {
         return includeFrozen;
     }

+    public int minPageSize() {
+        return minPageSize;
+    }
+
     //
     // copy methods
     //

     public QueryContainer with(Query q) {
-        return new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen);
+        return new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen,
+                minPageSize);
+    }
+
+    public QueryContainer withFields(List<Tuple<FieldExtraction, ExpressionId>> f) {
+        return new QueryContainer(query, aggs, f, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen,
+                minPageSize);
     }

     public QueryContainer withAliases(AttributeMap<Attribute> a) {
-        return new QueryContainer(query, aggs, fields, a, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen);
+        return new QueryContainer(query, aggs, fields, a, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen,
+                minPageSize);
     }

     public QueryContainer withPseudoFunctions(Map<String, GroupByKey> p) {
-        return new QueryContainer(query, aggs, fields, aliases, p, scalarFunctions, sort, limit, trackHits, includeFrozen);
+        return new QueryContainer(query, aggs, fields, aliases, p, scalarFunctions, sort, limit, trackHits, includeFrozen, minPageSize);
     }

     public QueryContainer with(Aggs a) {
-        return new QueryContainer(query, a, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen);
+        return new QueryContainer(query, a, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen,
+                minPageSize);
     }

     public QueryContainer withLimit(int l) {
         return l == limit ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, l, trackHits,
-                includeFrozen);
+                includeFrozen, minPageSize);
     }

     public QueryContainer withTrackHits() {
         return trackHits ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, true,
-                includeFrozen);
+                includeFrozen, minPageSize);
     }

     public QueryContainer withFrozen() {
         return includeFrozen ? this : new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit,
-                trackHits, true);
+                trackHits, true, minPageSize);
     }
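Threading `minPageSize` through every constructor call is the cost of `QueryContainer` being immutable: each `with*` method builds a whole new instance, and the cheap no-change cases (`withLimit`, `withTrackHits`, `withFrozen`) short-circuit to `this`. The pattern in miniature (a hypothetical two-field container, not the real class):

```java
// Immutable copy-on-write container in miniature; QueryContainer does this with ~11 fields.
final class MiniContainer {
    private final int limit;
    private final boolean trackHits;

    MiniContainer(int limit, boolean trackHits) {
        this.limit = limit;
        this.trackHits = trackHits;
    }

    MiniContainer withLimit(int l) {
        // short-circuit when nothing changes, exactly like QueryContainer#withLimit
        return l == limit ? this : new MiniContainer(l, trackHits);
    }

    MiniContainer withTrackHits() {
        return trackHits ? this : new MiniContainer(limit, true);
    }

    public static void main(String[] args) {
        MiniContainer base = new MiniContainer(-1, false);
        MiniContainer paged = base.withLimit(100).withTrackHits();
        assert base.withLimit(-1) == base; // unchanged value returns the same instance
        assert paged != base;              // any real change produces a fresh copy
        System.out.println("copy-on-write works");
    }
}
```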
     public QueryContainer withScalarProcessors(AttributeMap<Pipe> procs) {
-        return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, procs, sort, limit, trackHits, includeFrozen);
+        return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, procs, sort, limit, trackHits, includeFrozen, minPageSize);
     }

     public QueryContainer addSort(Sort sortable) {
         Set<Sort> sort = new LinkedHashSet<>(this.sort);
         sort.add(sortable);
-        return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen);
+        return new QueryContainer(query, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen,
+                minPageSize);
     }

     private String aliasName(Attribute attr) {
@@ -344,7 +361,8 @@ public class QueryContainer {
                 false, attr.parent().name());

         return new Tuple<>(
-                new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen),
+                new QueryContainer(q, aggs, fields, aliases, pseudoFunctions, scalarFunctions, sort, limit, trackHits, includeFrozen,
+                        minPageSize),
                 nestedFieldRef);
     }

@@ -447,7 +465,7 @@ public class QueryContainer {
         ExpressionId id = attr instanceof AggregateFunctionAttribute ? ((AggregateFunctionAttribute) attr).innerId() : attr.id();
         return new QueryContainer(query, aggs, combine(fields, new Tuple<>(ref, id)), aliases, pseudoFunctions, scalarFunctions,
-                sort, limit, trackHits, includeFrozen);
+                sort, limit, trackHits, includeFrozen, minPageSize);
     }

     public AttributeMap<Pipe> scalarFunctions() {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java
index 8f2c37356028..6f1ee47f4da3 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java
@@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.common.io.SqlStreamInput;
 import org.elasticsearch.xpack.sql.common.io.SqlStreamOutput;
-import org.elasticsearch.xpack.sql.execution.search.CompositeAggregationCursor;
+import org.elasticsearch.xpack.sql.execution.search.CompositeAggCursor;
+import org.elasticsearch.xpack.sql.execution.search.PivotCursor;
 import org.elasticsearch.xpack.sql.execution.search.ScrollCursor;
 import org.elasticsearch.xpack.sql.execution.search.extractor.BucketExtractors;
 import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractors;
@@ -45,7 +46,8 @@ public final class Cursors {
         // cursors
         entries.add(new NamedWriteableRegistry.Entry(Cursor.class, EmptyCursor.NAME, in -> Cursor.EMPTY));
         entries.add(new NamedWriteableRegistry.Entry(Cursor.class, ScrollCursor.NAME, ScrollCursor::new));
-        entries.add(new NamedWriteableRegistry.Entry(Cursor.class, CompositeAggregationCursor.NAME, CompositeAggregationCursor::new));
+        entries.add(new NamedWriteableRegistry.Entry(Cursor.class, CompositeAggCursor.NAME, CompositeAggCursor::new));
+        entries.add(new NamedWriteableRegistry.Entry(Cursor.class, PivotCursor.NAME, PivotCursor::new));
         entries.add(new NamedWriteableRegistry.Entry(Cursor.class, TextFormatterCursor.NAME, TextFormatterCursor::new));
         entries.add(new NamedWriteableRegistry.Entry(Cursor.class, ListCursor.NAME, ListCursor::new));
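Each cursor type is registered under a unique `NAME` tag so a serialized scroll token can be rehydrated into the right class; `PivotCursor` joins the registry alongside the renamed `CompositeAggCursor`, and keeping the one-character tags unique is presumably why `ListCursor` gives up `"p"` in the next hunk (that motivation is an inference; the diff itself only shows the rename). A toy name-to-reader registry conveying the dispatch idea (the real mechanism is `NamedWriteableRegistry` reading from a `StreamInput`; requires a recent JDK for records):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Toy version of named-writeable dispatch: a short name tag selects the deserializer.
final class CursorRegistryDemo {
    interface Cursor {}
    record ScrollCursor(String state) implements Cursor {}
    record PivotCursor(String state) implements Cursor {}

    private static final Map<String, Function<String, Cursor>> READERS = new HashMap<>();
    static {
        READERS.put("s", ScrollCursor::new); // names must be unique per registry
        READERS.put("p", PivotCursor::new);  // hypothetical tags, not the real ES ones
    }

    static Cursor read(String name, String payload) {
        Function<String, Cursor> reader = READERS.get(name);
        if (reader == null) {
            throw new IllegalArgumentException("unknown cursor type [" + name + "]");
        }
        return reader.apply(payload);
    }

    public static void main(String[] args) {
        System.out.println(read("p", "composite-key@42")); // PivotCursor[state=composite-key@42]
    }
}
```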
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java
index 7e20abc31de9..a07b7adfe37d 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java
@@ -21,7 +21,7 @@ import static java.util.Collections.emptyList;

 public class ListCursor implements Cursor {

-    public static final String NAME = "p";
+    public static final String NAME = "l";

     private final List<List<?>> data;
     private final int columnCount;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
index 2ce3c1fac964..4a783edae58a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
@@ -59,6 +59,8 @@ public enum DataType {
     GEO_POINT(      ExtTypes.GEOMETRY, Double.BYTES*2,    Integer.MAX_VALUE, 25 * 2 + 8, false, false, false),
     // IP can be v4 or v6. The latter has 2^128 addresses or 340,282,366,920,938,463,463,374,607,431,768,211,456
     // aka 39 chars
+    SHAPE(          ExtTypes.GEOMETRY, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, false, false, false),
+    // display size = 2 doubles + len("POINT( )")
     IP(             "ip", JDBCType.VARCHAR, 39, 39, 0, false, false, true),
     //
     // INTERVALS
@@ -254,7 +256,7 @@ public enum DataType {
     }

     public boolean isGeo() {
-        return this == GEO_POINT || this == GEO_SHAPE;
+        return this == GEO_POINT || this == GEO_SHAPE || this == SHAPE;
     }

     public boolean isDateBased() {
@@ -268,7 +270,7 @@ public enum DataType {
     public boolean isDateOrTimeBased() {
         return isDateBased() || isTimeBased();
     }
-    
+
     // data type extract-able from _source or from docvalue_fields
     public boolean isFromDocValuesOnly() {
         return this == KEYWORD // because of ignore_above. Extracting this from _source wouldn't make sense if it wasn't indexed at all.
@@ -276,13 +278,14 @@ public enum DataType {
             || this == DATETIME
             || this == SCALED_FLOAT // because of scaling_factor
             || this == GEO_POINT
-            || this == GEO_SHAPE;
+            || this == GEO_SHAPE
+            || this == SHAPE;
     }
-    
+
     public static DataType fromOdbcType(String odbcType) {
         return ODBC_TO_ES.get(odbcType);
     }
-    
+
     public static DataType fromSqlOrEsType(String typeName) {
         return SQL_TO_ES.get(typeName.toUpperCase(Locale.ROOT));
     }
@@ -305,7 +308,7 @@ public enum DataType {
     public String format() {
         return isDateOrTimeBased() ? 
DateUtils.DATE_PARSE_FORMAT : null; } - + /** * Returns the appropriate NumberType enum corresponding to this es type */ diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 8844301006fd..b4068932bf0f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -844,4 +844,57 @@ public class VerifierErrorMessagesTests extends ESTestCase { accept("SELECT ST_X(shape) FROM test"); } -} + // + // Pivot verifications + // + public void testPivotNonExactColumn() { + assertEquals("1:72: Field [text] of data type [text] cannot be used for grouping;" + + " No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead", + error("SELECT * FROM (SELECT int, text, keyword FROM test) " + "PIVOT(AVG(int) FOR text IN ('bla'))")); + } + + public void testPivotColumnUsedInsteadOfAgg() { + assertEquals("1:59: No aggregate function found in PIVOT at [int]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(int FOR keyword IN ('bla'))")); + } + + public void testPivotScalarUsedInsteadOfAgg() { + assertEquals("1:59: No aggregate function found in PIVOT at [ROUND(int)]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(ROUND(int) FOR keyword IN ('bla'))")); + } + + public void testPivotScalarUsedAlongSideAgg() { + assertEquals("1:59: Non-aggregate function found in PIVOT at [AVG(int) + ROUND(int)]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) + ROUND(int) FOR keyword IN ('bla'))")); + } + + public void testPivotValueNotFoldable() { + assertEquals("1:91: Non-literal [bool] found inside PIVOT values", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR keyword IN ('bla', bool))")); + } + + public void testPivotWithFunctionInput() { + assertEquals("1:37: No functions allowed (yet); encountered [YEAR(date)]", + error("SELECT * FROM (SELECT int, keyword, YEAR(date) FROM test) " + "PIVOT(AVG(int) FOR keyword IN ('bla'))")); + } + + public void testPivotWithFoldableFunctionInValues() { + assertEquals("1:85: Non-literal [UCASE('bla')] found inside PIVOT values", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR keyword IN ( UCASE('bla') ))")); + } + + public void testPivotWithNull() { + assertEquals("1:85: Null not allowed as a PIVOT value", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR keyword IN ( null ))")); + } + + public void testPivotValuesHaveDifferentTypeThanColumn() { + assertEquals("1:81: Literal ['bla'] of type [keyword] does not match type [boolean] of PIVOT column [bool]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR bool IN ('bla'))")); + } + + public void testPivotValuesWithMultipleDifferencesThanColumn() { + assertEquals("1:81: Literal ['bla'] of type [keyword] does not match type [boolean] of PIVOT column [bool]", + error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(AVG(int) FOR bool IN ('bla', true))")); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java
index 4216db7cb70d..195d11be434d 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java
@@ -19,8 +19,8 @@ import java.util.BitSet;
 import java.util.List;
 import java.util.function.Supplier;

-public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingTestCase<CompositeAggregationCursor> {
-    public static CompositeAggregationCursor randomCompositeCursor() {
+public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingTestCase<CompositeAggCursor> {
+    public static CompositeAggCursor randomCompositeCursor() {
         int extractorsSize = between(1, 20);
         ZoneId id = randomSafeZone();
         List<BucketExtractor> extractors = new ArrayList<>(extractorsSize);
@@ -28,7 +28,7 @@ public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingT
             extractors.add(randomBucketExtractor(id));
         }

-        return new CompositeAggregationCursor(new byte[randomInt(256)], extractors, randomBitSet(extractorsSize),
+        return new CompositeAggCursor(new byte[randomInt(256)], extractors, randomBitSet(extractorsSize),
                 randomIntBetween(10, 1024), randomBoolean(), randomAlphaOfLength(5));
     }

@@ -41,8 +41,8 @@ public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingT
     }

     @Override
-    protected CompositeAggregationCursor mutateInstance(CompositeAggregationCursor instance) throws IOException {
-        return new CompositeAggregationCursor(instance.next(), instance.extractors(),
+    protected CompositeAggCursor mutateInstance(CompositeAggCursor instance) throws IOException {
+        return new CompositeAggCursor(instance.next(), instance.extractors(),
             randomValueOtherThan(instance.mask(), () -> randomBitSet(instance.extractors().size())),
             randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 512)),
             !instance.includeFrozen(),
@@ -50,17 +50,17 @@ public class CompositeAggregationCursorTests extends AbstractSqlWireSerializingT
     }

     @Override
-    protected CompositeAggregationCursor createTestInstance() {
+    protected CompositeAggCursor createTestInstance() {
         return randomCompositeCursor();
     }

     @Override
-    protected Reader<CompositeAggregationCursor> instanceReader() {
-        return CompositeAggregationCursor::new;
+    protected Reader<CompositeAggCursor> instanceReader() {
+        return CompositeAggCursor::new;
     }

     @Override
-    protected ZoneId instanceZoneId(CompositeAggregationCursor instance) {
+    protected ZoneId instanceZoneId(CompositeAggCursor instance) {
         List<BucketExtractor> extractors = instance.extractors();
         for (BucketExtractor bucketExtractor : extractors) {
             ZoneId zoneId = MetricAggExtractorTests.extractZoneId(bucketExtractor);
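The cursor tests follow the standard wire-serialization harness: `createTestInstance` builds a random value, `instanceReader` re-reads it off the wire, and `mutateInstance` must produce a non-equal variant so that broken `equals`/serialization is actually detectable. A self-contained sketch of that round-trip idea using plain byte streams (the real harness goes through `StreamInput`/`StreamOutput` and version checks; run with `-ea`):

```java
import java.io.*;
import java.util.Objects;

// Round-trip test sketch: serialize, deserialize, compare. Stand-in for the ES test harness.
final class RoundTripDemo {
    static final class Cursor implements Serializable {
        final int limit;
        final boolean includeFrozen;
        Cursor(int limit, boolean includeFrozen) { this.limit = limit; this.includeFrozen = includeFrozen; }
        @Override public boolean equals(Object o) {
            return o instanceof Cursor c && c.limit == limit && c.includeFrozen == includeFrozen;
        }
        @Override public int hashCode() { return Objects.hash(limit, includeFrozen); }
    }

    static Cursor copy(Cursor c) throws IOException, ClassNotFoundException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(c);
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            return (Cursor) in.readObject();
        }
    }

    // mutateInstance in the tests above flips at least one field, e.g. !instance.includeFrozen()
    static Cursor mutate(Cursor c) { return new Cursor(c.limit, !c.includeFrozen); }

    public static void main(String[] args) throws Exception {
        Cursor original = new Cursor(512, true);
        assert copy(original).equals(original);     // round trip preserves equality
        assert !mutate(original).equals(original);  // mutation breaks it
        System.out.println("round trip ok");
    }
}
```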
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java
index b7404b8412a4..2544a0292608 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java
@@ -396,7 +396,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<FieldHitExtractor> {
             () -> fe.extractFromSource(map));
         assertThat(ex.getMessage(), is("Multiple values (returned by [a.b.c.d.e.f.g]) are not supported"));
     }
-    
+
     public void testFieldsWithSingleValueArrayAsSubfield() {
         FieldHitExtractor fe = getFieldHitExtractor("a.b", false);
         Object value = randomNonNullValue();
@@ -405,7 +405,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<FieldHitExtractor> {
         Map<String, Object> map = new HashMap<>();
@@ -414,7 +414,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<FieldHitExtractor> {
             () -> fe.extractFromSource(map));
         assertThat(ex.getMessage(), is("Arrays (returned by [a.b]) are not supported"));
     }
-    
+
     public void testFieldsWithSingleValueArrayAsSubfield_TwoNestedLists() {
         FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false);
         Object value = randomNonNullValue();
@@ -423,7 +423,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<FieldHitExtractor> {
         Map<String, Object> map = new HashMap<>();
@@ -432,7 +432,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<FieldHitExtractor> {
             () -> fe.extractFromSource(map));
         assertThat(ex.getMessage(), is("Arrays (returned by [a.b.c]) are not supported"));
     }
-    
+
     public void testFieldsWithSingleValueArrayAsSubfield_TwoNestedLists2() {
         FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false);
         Object value = randomNonNullValue();
@@ -462,7 +462,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<FieldHitExtractor> {
         Map<String, Object> map = new HashMap<>();
         map.put(fieldName, "POINT (1 2)");
         assertEquals(new GeoShape(1, 2), fe.extractFromSource(map));
@@ -474,7 +474,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<FieldHitExtractor> {
         Map<String, Object> map = new HashMap<>();
         map.put(fieldName, "POINT (1 2)");
         assertEquals(new GeoShape(1, 2), fe.extractFromSource(map));
@@ -487,7 +487,8 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<FieldHitExtractor> {
             () -> fe.extractFromSource(map2));
         assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported"));

-        FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, DataType.GEO_SHAPE, UTC, false, true);
+        FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName,
+            randomBoolean() ? 
DataType.GEO_SHAPE : DataType.SHAPE, UTC, false, true); assertEquals(new GeoShape(1, 2), lenientFe.extractFromSource(map2)); } @@ -605,7 +606,7 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase new BigDecimal("20012312345621343256123456254.20012312345621343256123456254"))); return value.get(); } - + private void assertFieldHitEquals(Object expected, Object actual) { if (expected instanceof BigDecimal) { // parsing will, by default, build a Double even if the initial value is BigDecimal diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java index 818897dce343..82a580d159cb 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/geo/StWkttosqlProcessorTests.java @@ -33,11 +33,11 @@ public class StWkttosqlProcessorTests extends ESTestCase { assertEquals("Cannot parse [some random string] as a geo_shape value", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("point (foo bar)")); - assertEquals("Cannot parse [point (foo bar)] as a geo_shape value", siae.getMessage()); + assertEquals("Cannot parse [point (foo bar)] as a geo_shape or shape value", siae.getMessage()); siae = expectThrows(SqlIllegalArgumentException.class, () -> procPoint.process("point (10 10")); - assertEquals("Cannot parse [point (10 10] as a geo_shape value", siae.getMessage()); + assertEquals("Cannot parse [point (10 10] as a geo_shape or shape value", siae.getMessage()); } public void testCoerce() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 2d3b6cdee527..0238cfe8591c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.optimizer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer.PruneSubqueryAliases; +import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution; @@ -20,6 +21,7 @@ import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.Order.OrderDirection; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.sql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.sql.expression.function.aggregate.Count; import org.elasticsearch.xpack.sql.expression.function.aggregate.First; import org.elasticsearch.xpack.sql.expression.function.aggregate.Last; @@ -87,14 +89,17 @@ import org.elasticsearch.xpack.sql.optimizer.Optimizer.PropagateEquals; import org.elasticsearch.xpack.sql.optimizer.Optimizer.PruneDuplicateFunctions; import org.elasticsearch.xpack.sql.optimizer.Optimizer.ReplaceFoldableAttributes; import 
org.elasticsearch.xpack.sql.optimizer.Optimizer.ReplaceMinMaxWithTopHits;
+import org.elasticsearch.xpack.sql.optimizer.Optimizer.RewritePivot;
 import org.elasticsearch.xpack.sql.optimizer.Optimizer.SimplifyCase;
 import org.elasticsearch.xpack.sql.optimizer.Optimizer.SimplifyConditional;
 import org.elasticsearch.xpack.sql.optimizer.Optimizer.SortAggregateOnOrderBy;
 import org.elasticsearch.xpack.sql.plan.logical.Aggregate;
+import org.elasticsearch.xpack.sql.plan.logical.EsRelation;
 import org.elasticsearch.xpack.sql.plan.logical.Filter;
 import org.elasticsearch.xpack.sql.plan.logical.LocalRelation;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.plan.logical.OrderBy;
+import org.elasticsearch.xpack.sql.plan.logical.Pivot;
 import org.elasticsearch.xpack.sql.plan.logical.Project;
 import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias;
 import org.elasticsearch.xpack.sql.plan.logical.command.ShowTables;
@@ -1498,4 +1503,23 @@ public class OptimizerTests extends ESTestCase {
         assertEquals(firstAlias, groupings.get(0));
         assertEquals(secondAlias, groupings.get(1));
     }
-}
+
+    public void testPivotRewrite() {
+        FieldAttribute column = getFieldAttribute("pivot");
+        FieldAttribute number = getFieldAttribute("number");
+        List<NamedExpression> values = Arrays.asList(new Alias(EMPTY, "ONE", L(1)), new Alias(EMPTY, "TWO", L(2)));
+        List<NamedExpression> aggs = Arrays.asList(new Avg(EMPTY, number));
+        Pivot pivot = new Pivot(EMPTY, new EsRelation(EMPTY, new EsIndex("table", emptyMap()), false), column, values, aggs);
+
+        LogicalPlan result = new RewritePivot().apply(pivot);
+        assertEquals(Pivot.class, result.getClass());
+        Pivot pv = (Pivot) result;
+        assertEquals(pv.aggregates(), aggs);
+        assertEquals(Filter.class, pv.child().getClass());
+        Filter f = (Filter) pv.child();
+        assertEquals(In.class, f.condition().getClass());
+        In in = (In) f.condition();
+        assertEquals(column, in.value());
+        assertEquals(Arrays.asList(L(1), L(2)), in.list());
+    }
+}
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java
index 6b7500cab661..adf1ad5b4d1e 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java
@@ -49,7 +49,7 @@ public class SysTypesTests extends ESTestCase {
             "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND",
             "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND",
             "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND",
-            "GEO_SHAPE", "GEO_POINT", "UNSUPPORTED", "OBJECT", "NESTED");
+            "GEO_SHAPE", "GEO_POINT", "SHAPE", "UNSUPPORTED", "OBJECT", "NESTED");

         cmd.execute(session(), wrap(p -> {
             SchemaRowSet r = (SchemaRowSet) p.rowSet();
@@ -62,7 +62,7 @@ public class SysTypesTests extends ESTestCase {
             assertFalse(r.column(10, Boolean.class)); // no auto-increment
             assertFalse(r.column(11, Boolean.class));
-            
+
             for (int i = 0; i < r.size(); i++) {
                 assertEquals(names.get(i), r.column(0));
                 r.advanceRow();
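`testPivotRewrite` pins down what `RewritePivot` produces: the PIVOT's value list is pushed beneath the pivot as a filter, so only rows whose pivot column matches one of the declared values are ever aggregated; the test asserts the child becomes `Filter` with an `In(column, [1, 2])` condition. A toy model of that tree rewrite (hypothetical mini plan classes, not the ES ones; records need a recent JDK):

```java
import java.util.List;

// Toy logical-plan rewrite mirroring RewritePivot: Pivot(child) -> Pivot(Filter(child, In(column, values))).
final class RewritePivotDemo {
    record In(String column, List<Object> values) {}
    record Filter(String child, In condition) {}

    static Filter rewrite(String child, String pivotColumn, List<Object> pivotValues) {
        // same shape the test asserts: In.value() is the pivot column, In.list() the literal values
        return new Filter(child, new In(pivotColumn, pivotValues));
    }

    public static void main(String[] args) {
        Filter f = rewrite("EsRelation[table]", "pivot", List.of(1, 2));
        System.out.println(f);
        // Filter[child=EsRelation[table], condition=In[column=pivot, values=[1, 2]]]
    }
}
```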
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java
new file mode 100644
index 000000000000..4e89fdb21544
--- /dev/null
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/PostOptimizerVerifierTests.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.planner;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.TestUtils;
+import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
+import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier;
+import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
+import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
+import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
+import org.elasticsearch.xpack.sql.optimizer.Optimizer;
+import org.elasticsearch.xpack.sql.parser.SqlParser;
+import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
+import org.elasticsearch.xpack.sql.stats.Metrics;
+import org.elasticsearch.xpack.sql.type.EsField;
+import org.elasticsearch.xpack.sql.type.TypesTests;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.Map;
+
+public class PostOptimizerVerifierTests extends ESTestCase {
+
+    private SqlParser parser;
+    private Analyzer analyzer;
+    private Optimizer optimizer;
+    private Planner planner;
+    private IndexResolution indexResolution;
+
+    @Before
+    public void init() {
+        parser = new SqlParser();
+
+        Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
+        EsIndex test = new EsIndex("test", mapping);
+        indexResolution = IndexResolution.valid(test);
+        analyzer = new Analyzer(TestUtils.TEST_CFG, new FunctionRegistry(), indexResolution, new Verifier(new Metrics()));
+        optimizer = new Optimizer();
+        planner = new Planner();
+    }
+
+    @After
+    public void destroy() {
+        parser = null;
+        analyzer = null;
+    }
+
+    private PhysicalPlan plan(String sql) {
+        return planner.plan(optimizer.optimize(analyzer.analyze(parser.createStatement(sql), true)), true);
+    }
+
+    private String error(String sql) {
+        return error(indexResolution, sql);
+    }
+
+    private String error(IndexResolution getIndexResult, String sql) {
+        PlanningException e = expectThrows(PlanningException.class, () -> plan(sql));
+        assertTrue(e.getMessage().startsWith("Found "));
+        String header = "Found 1 problem(s)\nline ";
+        return e.getMessage().substring(header.length());
+    }
+
+    public void testPivotInnerAgg() {
+        assertEquals("1:59: Aggregation [SUM_OF_SQUARES(int)] not supported (yet) by PIVOT",
+            error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(SUM_OF_SQUARES(int) FOR keyword IN ('bla'))"));
+    }
+
+    public void testPivotNestedInnerAgg() {
+        assertEquals("1:65: Aggregation [SUM_OF_SQUARES(int)] not supported (yet) by PIVOT",
+            error("SELECT * FROM (SELECT int, keyword, bool FROM test) " + "PIVOT(ROUND(SUM_OF_SQUARES(int)) FOR keyword IN ('bla'))"));
+    }
+}
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java
index c94da6621515..11f6cc949de4 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java
@@ -11,6 +11,7 @@ import 
org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; import org.elasticsearch.xpack.sql.analysis.analyzer.Verifier; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; import org.elasticsearch.xpack.sql.optimizer.Optimizer; @@ -26,8 +27,10 @@ import org.elasticsearch.xpack.sql.type.TypesTests; import org.junit.AfterClass; import org.junit.BeforeClass; +import java.util.Arrays; import java.util.Map; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.startsWith; @@ -397,4 +400,18 @@ public class QueryFolderTests extends ESTestCase { AggregateFunctionAttribute afa = (AggregateFunctionAttribute) ee.output().get(0); assertThat(afa.propertyPath(), endsWith("[3.0]")); } + + public void testFoldingOfPivot() { + PhysicalPlan p = plan("SELECT * FROM (SELECT int, keyword, bool FROM test) PIVOT(AVG(int) FOR keyword IN ('A', 'B'))"); + assertEquals(EsQueryExec.class, p.getClass()); + EsQueryExec ee = (EsQueryExec) p; + assertEquals(3, ee.output().size()); + assertEquals(Arrays.asList("bool", "'A'", "'B'"), Expressions.names(ee.output())); + String q = ee.toString().replaceAll("\\s+", ""); + assertThat(q, containsString("\"query\":{\"terms\":{\"keyword\":[\"A\",\"B\"]")); + String a = ee.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""); + assertThat(a, containsString("\"terms\":{\"field\":\"bool\"")); + assertThat(a, containsString("\"terms\":{\"field\":\"keyword\"")); + assertThat(a, containsString("{\"avg\":{\"field\":\"int\"}")); + } } diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 700d896f2538..3f133ee539e9 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -39,7 +39,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; import static java.util.Collections.emptyList; @@ -49,6 +48,7 @@ import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -213,7 +213,7 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { }, () -> "Exception when disabling monitoring"); - awaitBusy(() -> { + assertBusy(() -> { try { ClientYamlTestResponse response = callApi("xpack.usage", singletonMap("filter_path", "monitoring.enabled_exporters"), emptyList(), @@ -222,7 +222,7 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { @SuppressWarnings("unchecked") final Map exporters = (Map) response.evaluate("monitoring.enabled_exporters"); if (exporters.isEmpty() == false) { - return false; + 
fail("Exporters were not found"); } final Map params = new HashMap<>(); @@ -237,7 +237,8 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { final Map node = (Map) nodes.values().iterator().next(); final Number activeWrites = (Number) extractValue("thread_pool.write.active", node); - return activeWrites != null && activeWrites.longValue() == 0L; + assertNotNull(activeWrites); + assertThat(activeWrites, equalTo(0)); } catch (Exception e) { throw new ElasticsearchException("Failed to wait for monitoring exporters to stop:", e); } @@ -281,26 +282,15 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { Map params, List> bodies, CheckedFunction success, - Supplier error) throws Exception { - - AtomicReference exceptionHolder = new AtomicReference<>(); - awaitBusy(() -> { - try { - ClientYamlTestResponse response = callApi(apiName, params, bodies, getApiCallHeaders()); - if (response.getStatusCode() == HttpStatus.SC_OK) { - exceptionHolder.set(null); - return success.apply(response); - } - return false; - } catch (IOException e) { - exceptionHolder.set(e); - } - return false; - }); - - IOException exception = exceptionHolder.get(); - if (exception != null) { - throw new IllegalStateException(error.get(), exception); + Supplier error) { + try { + // The actual method call that sends the API requests returns a Future, but we immediately + // call .get() on it so there's no need for this method to do any other awaiting. + ClientYamlTestResponse response = callApi(apiName, params, bodies, getApiCallHeaders()); + assertEquals(response.getStatusCode(), HttpStatus.SC_OK); + success.apply(response); + } catch (Exception e) { + throw new IllegalStateException(error.get(), e); } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/graph.explore.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/graph.explore.json index aa8177bb8621..253ab219d528 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/graph.explore.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/graph.explore.json @@ -7,39 +7,17 @@ "url":{ "paths":[ { - "path":"/{index}/_graph/explore", - "methods":[ + "path": "/{index}/_graph/explore", + "methods": [ "GET", "POST" ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" + "parts": { + "index": { + "type": "list", + "description": "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" } } - }, - { - "path":"/{index}/{type}/_graph/explore", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "index":{ - "type":"list", - "description":"A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type":{ - "type":"list", - "description":"A comma-separated list of document types to search; leave empty to perform the operation on all types", - "deprecated":true - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"Specifying types in urls has been deprecated" - } } ] }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.delete_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.delete_transform.json similarity index 94% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.delete_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.delete_transform.json 
index d8545728ab9c..01915ed7b56b 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.delete_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.delete_transform.json @@ -1,5 +1,5 @@ { - "data_frame.delete_data_frame_transform":{ + "transform.delete_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html" }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform.json similarity index 87% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform.json index e25a3301ec0c..9baf3446a2ae 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform.json @@ -1,5 +1,5 @@ { - "data_frame.get_data_frame_transform":{ + "transform.get_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform.html" }, @@ -40,7 +40,7 @@ "allow_no_match":{ "type":"boolean", "required":false, - "description":"Whether to ignore if a wildcard expression matches no data frame transforms. (This includes `_all` string or when no data frame transforms have been specified)" + "description":"Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified)" } } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform_stats.json similarity index 86% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform_stats.json index 57b004482a72..f37dfe29ff4f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.get_data_frame_transform_stats.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.get_transform_stats.json @@ -1,5 +1,5 @@ { - "data_frame.get_data_frame_transform_stats":{ + "transform.get_transform_stats":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html" }, @@ -34,7 +34,7 @@ "allow_no_match":{ "type":"boolean", "required":false, - "description":"Whether to ignore if a wildcard expression matches no data frame transforms. (This includes `_all` string or when no data frame transforms have been specified)" + "description":"Whether to ignore if a wildcard expression matches no transforms. 
(This includes `_all` string or when no transforms have been specified)" } } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.preview_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.preview_transform.json similarity index 75% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.preview_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.preview_transform.json index e3f24448b9f6..b5ff3cbba966 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.preview_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.preview_transform.json @@ -1,5 +1,5 @@ { - "data_frame.preview_data_frame_transform":{ + "transform.preview_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-transform.html" }, @@ -15,7 +15,7 @@ ] }, "body":{ - "description":"The definition for the data_frame transform to preview", + "description":"The definition for the transform to preview", "required":true } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.put_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.put_transform.json similarity index 82% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.put_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.put_transform.json index 5e7354f435d9..7925fc1063be 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.put_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.put_transform.json @@ -1,5 +1,5 @@ { - "data_frame.put_data_frame_transform":{ + "transform.put_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html" }, @@ -24,11 +24,11 @@ "defer_validation": { "type": "boolean", "required": false, - "description": "If validations should be deferred until data frame transform starts, defaults to false." + "description": "If validations should be deferred until transform starts, defaults to false." 
} }, "body":{ - "description":"The data frame transform definition", + "description":"The transform definition", "required":true } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.start_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.start_transform.json similarity index 93% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.start_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.start_transform.json index c0d701be5624..408f978e22cb 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.start_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.start_transform.json @@ -1,5 +1,5 @@ { - "data_frame.start_data_frame_transform":{ + "transform.start_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html" }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.stop_transform.json similarity index 86% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.stop_transform.json index c88a7176309a..b09d19703bf3 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.stop_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.stop_transform.json @@ -1,5 +1,5 @@ { - "data_frame.stop_data_frame_transform":{ + "transform.stop_transform":{ "documentation":{ "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-transform.html" }, @@ -34,7 +34,7 @@ "allow_no_match":{ "type":"boolean", "required":false, - "description":"Whether to ignore if a wildcard expression matches no data frame transforms. (This includes `_all` string or when no data frame transforms have been specified)" + "description":"Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified)" } } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.update_data_frame_transform.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.update_transform.json similarity index 82% rename from x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.update_data_frame_transform.json rename to x-pack/plugin/src/test/resources/rest-api-spec/api/transform.update_transform.json index 70d1342815eb..68de23da71b9 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/data_frame.update_data_frame_transform.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/transform.update_transform.json @@ -1,5 +1,5 @@ { - "data_frame.update_data_frame_transform": { + "transform.update_transform": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html" }, @@ -23,11 +23,11 @@ "defer_validation": { "type": "boolean", "required": false, - "description": "If validations should be deferred until data frame transform starts, defaults to false." + "description": "If validations should be deferred until transform starts, defaults to false." 
} }, "body": { - "description" : "The update data frame transform definition", + "description" : "The update transform definition", "required": true } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/10_basic.yml index ce4751d690d8..265ef1f8cc99 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/10_basic.yml @@ -171,6 +171,10 @@ --- "Bulk indexing of monitoring data on closed indices should throw an export exception": + - skip: + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/30101" + - do: monitoring.bulk: system_id: "beats" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml index 437ce21d0c82..c9e69fc0c764 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml @@ -77,6 +77,8 @@ teardown: "Monitoring Bulk API": - skip: features: catch_unauthorized + version: "all" + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/30101" - do: headers: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml index eb92cc252b56..b23862c5553d 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/20_has_application_privs.yml @@ -109,6 +109,27 @@ setup: ] } + - do: + security.put_role: + name: "role_containing_wildcard_app_name_and_plain_app_name" + body: > + { + "cluster": [], + "indices": [], + "applications": [ + { + "application": "myapp", + "privileges": ["user"], + "resources": ["*"] + }, + { + "application": "yourapp-*", + "privileges": ["read"], + "resources": ["*"] + } + ] + } + # And a user for each role - do: security.put_user: @@ -134,6 +155,14 @@ setup: "password": "p@ssw0rd", "roles" : [ "yourapp_read_config" ] } + - do: + security.put_user: + username: "myapp_yourapp_wildard_role_user" + body: > + { + "password": "p@ssw0rd", + "roles" : [ "role_containing_wildcard_app_name_and_plain_app_name" ] + } --- teardown: @@ -168,6 +197,11 @@ teardown: username: "your_read" ignore: 404 + - do: + security.delete_user: + username: "myapp_yourapp_wildard_role_user" + ignore: 404 + - do: security.delete_role: name: "myapp_engineering_read" @@ -182,6 +216,12 @@ teardown: security.delete_role: name: "yourapp_read_config" ignore: 404 + + - do: + security.delete_role: + name: "role_containing_wildcard_app_name_and_plain_app_name" + ignore: 404 + --- "Test has_privileges with application-privileges": - do: @@ -291,3 +331,48 @@ teardown: } } } } + + - do: + headers: { Authorization: "Basic bXlhcHBfeW91cmFwcF93aWxkYXJkX3JvbGVfdXNlcjpwQHNzdzByZA==" } # myapp_yourapp_wildard_role_user + security.has_privileges: + user: null + body: > + { + "application": [ + { + "application" : "myapp", + "resources" : [ "*" ], + "privileges" : [ "action:login" ] + }, + { + "application" : "yourapp-v1", + "resources" : [ "*" ], + "privileges" : [ "read" ] + }, + { + "application" : "yourapp-v2", + "resources" : [ "*" ], + 
"privileges" : [ "read" ] + } + ] + } + + - match: { "username" : "myapp_yourapp_wildard_role_user" } + - match: { "has_all_requested" : true } + - match: { "application" : { + "myapp" : { + "*" : { + "action:login" : true + } + }, + "yourapp-v1": { + "*": { + "read": true + } + }, + "yourapp-v2": { + "*": { + "read": true + } + } + } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/30_prohibited_role_query.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/30_prohibited_role_query.yml index abddcdc6dda6..afa4c078b25c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/30_prohibited_role_query.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/roles/30_prohibited_role_query.yml @@ -7,7 +7,11 @@ setup: cluster.health: wait_for_status: yellow +--- +"Test use prohibited query inside role query": + - do: + catch: /terms query with terms lookup isn't supported as part of a role query/ security.put_role: name: "role" body: > @@ -18,55 +22,9 @@ setup: "names": "index", "privileges": ["all"], "query" : { - "terms" : { "field" : { "index" : "_index", "type" : "_type", "id" : "_id", "path" : "_path"} } + "terms" : { "field" : { "index" : "_index", "id" : "_id", "path" : "_path"} } } } ] } - - do: - security.put_user: - username: "joe" - body: > - { - "password": "x-pack-test-password", - "roles" : [ "role" ] - } - ---- -teardown: - - do: - security.delete_user: - username: "joe" - ignore: 404 - - do: - security.delete_role: - name: "role" - ignore: 404 - - ---- -"Test use prohibited query inside role query": - - - do: - headers: - Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk" - index: - index: index - type: type - id: 1 - body: > - { - "foo": "bar" - } - - - - do: - catch: /terms query with terms lookup isn't supported as part of a role query/ - headers: - Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk" - search: - rest_total_hits_as_int: true - index: index - body: { "query" : { "match_all" : {} } } - diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/preview_transforms.yml similarity index 94% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/preview_transforms.yml index 30c7ec626877..35289c2bbd09 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/preview_transforms.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/preview_transforms.yml @@ -68,7 +68,7 @@ setup: --- "Test preview transform": - do: - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -120,7 +120,7 @@ setup: } - match: { acknowledged: true } - do: - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -154,7 +154,7 @@ setup: "Test preview transform with invalid config": - do: catch: /\[data_frame_terms_group\] unknown field \[not_a_terms_param\]/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -167,7 +167,7 @@ setup: "Test preview with non-existing source index": - do: catch: /Source index \[does_not_exist\] does not exist/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": ["airline-data", 
"does_not_exist"] }, @@ -182,7 +182,7 @@ setup: "Test preview returns bad request with invalid agg": - do: catch: bad_request - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -198,7 +198,7 @@ setup: - do: catch: /field \[time\] cannot be both an object and a field/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -213,7 +213,7 @@ setup: } - do: catch: /field \[super_metric\] cannot be both an object and a field/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -238,7 +238,7 @@ setup: "Test preview with missing pipeline": - do: catch: bad_request - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -256,7 +256,7 @@ setup: "Test preview with unsupported agg": - do: catch: bad_request - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, @@ -271,7 +271,7 @@ setup: } - do: catch: /Unsupported aggregation type \[terms\]/ - data_frame.preview_data_frame_transform: + transform.preview_transform: body: > { "source": { "index": "airline-data" }, diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_crud.yml similarity index 90% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_crud.yml index 57260996bae2..fd9f75735e26 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_crud.yml @@ -17,14 +17,14 @@ setup: --- "Test get all transforms when there are none": - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "*" - match: { count: 0 } - match: { transforms: [] } - do: catch: missing - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "*" allow_no_match: false @@ -32,21 +32,21 @@ setup: "Test get transform when it does not exist": - do: catch: /Transform with id \[missing-transform-id\] could not be found/ - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "missing-transform-id" --- "Test delete transform when it does not exist": - do: catch: missing - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "missing transform" --- "Test put transform with frequency too low": - do: catch: /minimum permitted \[frequency\] is \[1s\]/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "frequency-too-low" body: > { @@ -63,7 +63,7 @@ setup: "Test put transform with frequency too high": - do: catch: /highest permitted \[frequency\] is \[1h\]/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "frequency-too-low" body: > { @@ -80,7 +80,7 @@ setup: "Test put transform with invalid source index": - do: catch: /Source index \[missing-index\] does not exist/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "missing-source-transform" body: > { @@ -92,7 +92,7 @@ setup: } } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "missing-source-transform" 
defer_validation: true body: > @@ -108,7 +108,7 @@ setup: --- "Test basic transform crud": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -123,7 +123,7 @@ setup: - match: { acknowledged: true } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-dos" body: > { @@ -137,7 +137,7 @@ setup: - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform" - match: { count: 1 } - match: { transforms.0.id: "airline-transform" } @@ -151,7 +151,7 @@ setup: - match: { transforms.0.description: "yaml test transform on airline-data" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "*" - match: { count: 2 } - match: { transforms.0.id: "airline-transform" } @@ -159,27 +159,27 @@ setup: - is_false: transforms.1.description - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "_all" - match: { count: 2 } - match: { transforms.0.id: "airline-transform" } - match: { transforms.1.id: "airline-transform-dos" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform,airline-transform-dos" - match: { count: 2 } - match: { transforms.0.id: "airline-transform" } - match: { transforms.1.id: "airline-transform-dos" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform*" - match: { count: 2 } - match: { transforms.0.id: "airline-transform" } - match: { transforms.1.id: "airline-transform-dos" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform*" from: 0 size: 1 @@ -187,7 +187,7 @@ setup: - match: { transforms.0.id: "airline-transform" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform*" from: 1 size: 1 @@ -212,7 +212,7 @@ setup: type: integer - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -228,7 +228,7 @@ setup: } - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform" - match: { count: 1 } - match: { transforms.0.id: "airline-transform" } @@ -241,7 +241,7 @@ setup: --- "Test PUT continuous transform": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-continuous" body: > { @@ -262,7 +262,7 @@ setup: } - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-continuous" } @@ -275,7 +275,7 @@ setup: --- "Test PUT continuous transform without delay set": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-continuous" body: > { @@ -295,7 +295,7 @@ setup: } - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "airline-transform-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-continuous" } @@ -309,7 +309,7 @@ setup: "Test transform with invalid page parameter": - do: catch: /Param \[size\] has a max acceptable value of \[1000\]/ - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "_all" from: 0 size: 10000 @@ 
-317,7 +317,7 @@ setup: "Test transform where dest is included in source": - do: catch: /Destination index \[airline-data-by-airline\] is included in source expression \[airline-data/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -332,7 +332,7 @@ setup: } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" defer_validation: true body: > @@ -363,7 +363,7 @@ setup: index: created-destination-index name: dest-index - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "transform-from-aliases" body: > { @@ -385,7 +385,7 @@ setup: - do: catch: /Destination index \[created-destination-index\] is included in source expression \[airline-data,created-destination-index\]/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "transform-from-aliases-failures" body: > { @@ -411,7 +411,7 @@ setup: - do: catch: /Destination index \[dest-index\] should refer to a single index/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -428,7 +428,7 @@ setup: "Test put config with invalid pivot size": - do: catch: /pivot\.max_page_search_size \[5\] must be greater than 10 and less than 10,000/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -442,7 +442,7 @@ setup: } - do: catch: /pivot\.max_page_search_size \[15000\] must be greater than 10 and less than 10,000/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -458,7 +458,7 @@ setup: "Test creation failures due to duplicate and conflicting field names": - do: catch: /duplicate field \[airline\] detected/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "duplicate-field-transform" body: > { @@ -473,7 +473,7 @@ setup: } - do: catch: /field \[airline\] cannot be both an object and a field/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "duplicate-field-transform" body: > { @@ -487,10 +487,10 @@ setup: } } --- -"Test invalid data frame id": +"Test invalid transform id": - do: catch: /can contain lowercase alphanumeric \(a-z and 0-9\), hyphens or underscores; must start and end with alphanumeric/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "!@#$%^&*(duplicate-field-transform" body: > { @@ -505,7 +505,7 @@ setup: } - do: catch: /The id cannot contain more than 64 character/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" body: > { @@ -522,7 +522,7 @@ setup: "Test invalid destination index name": - do: catch: /dest\.index \[DeStInAtIoN\] must be lowercase/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -537,7 +537,7 @@ setup: } - do: catch: /Invalid index name \[destination#dest\], must not contain \'#\'/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform" body: > { @@ -556,7 +556,7 @@ setup: - do: catch: /Found \[create_time\], not allowed for strict parsing/ - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-with-create-time" body: > { @@ -575,7 +575,7 @@ setup: - do: catch: /Found \[version\], not allowed for strict parsing/ - 
data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-with-version" body: > { @@ -591,7 +591,7 @@ setup: --- "Test force deleting a running transform": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-start-delete" body: > { @@ -610,12 +610,12 @@ setup: } - match: { acknowledged: true } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-delete" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-delete" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-delete" } @@ -623,11 +623,11 @@ setup: - do: catch: /Cannot delete transform \[airline-transform-start-delete\] as the task is running/ - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-delete" - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-delete" force: true - match: { acknowledged: true } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_start_stop.yml similarity index 81% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_start_stop.yml index 54805ffd7e73..ad28d880faf7 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_start_stop.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_start_stop.yml @@ -17,7 +17,7 @@ setup: event_rate: type: integer - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-start-stop" body: > { @@ -32,18 +32,18 @@ setup: --- teardown: - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop" timeout: "10m" wait_for_completion: true - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-stop" --- "Test start transform": - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } @@ -51,25 +51,25 @@ teardown: "Test start missing transform": - do: catch: missing - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "missing-transform" --- "Test start already started transform": - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: catch: /Cannot start transform \[airline-transform-start-stop\] as it is already started/ - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" --- "Verify start transform creates destination index with appropriate mapping": - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: @@ -84,7 +84,7 @@ teardown: indices.create: index: airline-data-by-airline-start-stop - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { 
acknowledged: true } - do: @@ -94,37 +94,37 @@ teardown: --- "Test start/stop/start transform": - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - match: { transforms.0.state: "stopped" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } @@ -132,7 +132,7 @@ teardown: --- "Test start/stop/start continuous transform": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-start-stop-continuous" body: > { @@ -151,82 +151,82 @@ teardown: } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop-continuous" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop-continuous" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop-continuous" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop-continuous" } - match: { transforms.0.state: "stopped" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop-continuous" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop-continuous" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop-continuous" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-stop-continuous" --- "Test stop missing transform": - do: catch: missing - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "missing-transform" --- "Test stop missing transform by expression": - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: allow_no_match: 
true transform_id: "missing-transform*" - do: catch: missing - data_frame.stop_data_frame_transform: + transform.stop_transform: allow_no_match: false transform_id: "missing-transform*" --- "Test stop already stopped transform": - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } --- "Test start/stop only starts/stops specified transform": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-start-later" body: > { @@ -238,57 +238,57 @@ teardown: } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-stop" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-stop" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-later" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-later" } - match: { transforms.0.state: "stopped" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-later" - match: { acknowledged: true } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-stop" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-start-later" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-start-later" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-start-later" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-start-later" --- "Test stop all": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stop-all" body: > { @@ -300,35 +300,35 @@ teardown: } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-stop-all" - match: { acknowledged: true } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-start-stop" - match: { acknowledged: true } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "_all" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "*" - match: { count: 2 } - match: { transforms.0.state: "stopped" } - match: { transforms.1.state: "stopped" } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stop-all" --- "Test start/stop with field alias": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline_via_field_alias" body: > { @@ -340,7 +340,7 @@ teardown: } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline_via_field_alias" - match: { acknowledged: true } diff --git 
a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml similarity index 87% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml index b4699898d483..5f4c11f00d08 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_stats.yml @@ -14,7 +14,7 @@ setup: event_rate: type: integer - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats" body: > { @@ -26,24 +26,24 @@ setup: } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-stats" --- teardown: - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-stats" wait_for_completion: true - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stats" --- "Test get transform stats": - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-stats" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats" } @@ -63,7 +63,7 @@ teardown: --- "Test get transform stats on missing transform": - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "missing-transform" - match: { count: 0 } - match: { transforms: [] } @@ -71,7 +71,7 @@ teardown: --- "Test get multiple transform stats": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-dos" body: > { @@ -83,7 +83,7 @@ teardown: } } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-the-third" body: > { @@ -95,7 +95,7 @@ teardown: } } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "*" - match: { count: 3 } - match: { transforms.0.id: "airline-transform-stats" } @@ -103,7 +103,7 @@ teardown: - match: { transforms.2.id: "airline-transform-stats-the-third" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "_all" - match: { count: 3 } - match: { transforms.0.id: "airline-transform-stats" } @@ -111,14 +111,14 @@ teardown: - match: { transforms.2.id: "airline-transform-stats-the-third" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-stats-dos,airline-transform-stats-the*" - match: { count: 2 } - match: { transforms.0.id: "airline-transform-stats-dos" } - match: { transforms.1.id: "airline-transform-stats-the-third" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "_all" from: 0 size: 1 @@ -126,7 +126,7 @@ teardown: - match: { transforms.0.id: "airline-transform-stats" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "_all" from: 1 size: 2 @@ -135,18 +135,18 @@ teardown: - match: { transforms.1.id: "airline-transform-stats-the-third" } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stats-dos" - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: 
transform_id: "airline-transform-stats-the-third" --- "Test get multiple transform stats where one does not have a task": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-dos" body: > { @@ -158,14 +158,14 @@ teardown: } } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "*" - match: { count: 2 } - match: { transforms.0.id: "airline-transform-stats" } - match: { transforms.1.id: "airline-transform-stats-dos" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "_all" - match: { count: 2 } - match: { transforms.0.id: "airline-transform-stats" } @@ -175,7 +175,7 @@ teardown: "Test get single transform stats when it does not have a task": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-dos" body: > { @@ -187,7 +187,7 @@ teardown: } } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-stats-dos" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats-dos" } @@ -206,7 +206,7 @@ teardown: --- "Test get continuous transform stats": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "airline-transform-stats-continuous" body: > { @@ -219,10 +219,10 @@ teardown: "sync": { "time": { "field": "time", "delay": "1m" } } } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "airline-transform-stats-continuous" - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "airline-transform-stats-continuous" - match: { count: 1 } - match: { transforms.0.id: "airline-transform-stats-continuous" } @@ -251,10 +251,10 @@ teardown: - match: { transforms.0.stats.exponential_avg_documents_processed: 0.0 } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "airline-transform-stats-continuous" wait_for_completion: true - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "airline-transform-stats-continuous" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_update.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_update.yml similarity index 91% rename from x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_update.yml rename to x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_update.yml index f764258e5afe..5b054a27fa38 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/data_frame/transforms_update.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/transform/transforms_update.yml @@ -15,7 +15,7 @@ setup: type: integer - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "updating-airline-transform" body: > { @@ -38,7 +38,7 @@ setup: "Test update transform with missing transform": - do: catch: /Transform with id \[missing-transform\] could not be found/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "missing-transform" body: > { @@ -48,7 +48,7 @@ setup: "Test update transform with frequency too low": - do: catch: /minimum permitted \[frequency\] is \[1s\]/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -58,7 +58,7 @@ setup: "Test update transform with 
frequency too high": - do: catch: /highest permitted \[frequency\] is \[1h\]/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -68,14 +68,14 @@ setup: "Test put transform with invalid source index": - do: catch: /Source index \[missing-index\] does not exist/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { "source": { "index": "missing-index" } } - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" defer_validation: true body: > @@ -87,7 +87,7 @@ setup: --- "Test update transform when it is batch": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "batch-airline-transform" body: > { @@ -99,7 +99,7 @@ setup: } } - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "batch-airline-transform" body: > { @@ -109,7 +109,7 @@ setup: - do: catch: /Cannot change the current sync configuration of transform \[batch-airline-transform\] from \[null\] to \[time\]/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "batch-airline-transform" body: > { @@ -137,7 +137,7 @@ setup: event_rate: type: integer - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "updating-airline-transform" - match: { count: 1 } - match: { transforms.0.id: "updating-airline-transform" } @@ -154,7 +154,7 @@ setup: - match: { transforms.0.frequency: "60s" } - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -180,7 +180,7 @@ setup: - match: { frequency: "5s" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "updating-airline-transform" - match: { count: 1 } - match: { transforms.0.id: "updating-airline-transform" } @@ -200,7 +200,7 @@ setup: "Test transform where dest is included in source": - do: catch: /Destination index \[airline-data-by-airline\] is included in source expression \[airline-data/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -211,7 +211,7 @@ setup: } - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" defer_validation: true body: > @@ -238,7 +238,7 @@ setup: index: created-destination-index name: dest-index - do: - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -256,7 +256,7 @@ setup: - do: catch: /Destination index \[created-destination-index\] is included in source expression \[created-destination-index\]/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -281,7 +281,7 @@ setup: name: dest2-index - do: catch: /Destination index \[dest2-index\] should refer to a single index/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -291,7 +291,7 @@ setup: "Test invalid destination index name": - do: catch: /dest\.index \[DeStInAtIoN\] must be lowercase/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { @@ -299,7 +299,7 @@ setup: } - do: catch: /Invalid index name \[destination#dest\], must 
not contain \'#\'/ - data_frame.update_data_frame_transform: + transform.update_transform: transform_id: "updating-airline-transform" body: > { diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index d394636507cc..46c53b8204b2 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -16,9 +16,9 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; +import org.elasticsearch.client.transform.transforms.TransformStats; import org.elasticsearch.client.transform.transforms.DestConfig; import org.elasticsearch.client.transform.transforms.TimeSyncConfig; import org.elasticsearch.client.transform.transforms.pivot.SingleGroupSource; @@ -65,7 +65,7 @@ public class TransformIT extends TransformIntegTestCase { .addAggregator(AggregationBuilders.avg("review_score").field("stars")) .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); - DataFrameTransformConfig config = createTransformConfig("transform-crud", + TransformConfig config = createTransformConfig("transform-crud", groups, aggs, "reviews-by-user-business-day", @@ -78,7 +78,7 @@ public class TransformIT extends TransformIntegTestCase { stopTransform(config.getId()); - DataFrameTransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); + TransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT)); Instant now = Instant.now(); assertTrue("[create_time] is not before current time", storedConfig.getCreateTime().isBefore(now)); @@ -98,7 +98,7 @@ public class TransformIT extends TransformIntegTestCase { .addAggregator(AggregationBuilders.avg("review_score").field("stars")) .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); - DataFrameTransformConfig config = createTransformConfigBuilder("transform-crud", + TransformConfig config = createTransformConfigBuilder("transform-crud", groups, aggs, "reviews-by-user-business-day", @@ -112,7 +112,7 @@ public class TransformIT extends TransformIntegTestCase { waitUntilCheckpoint(config.getId(), 1L); assertThat(getTransformStats(config.getId()).getTransformsStats().get(0).getState(), - equalTo(DataFrameTransformStats.State.STARTED)); + equalTo(TransformStats.State.STARTED)); long docsIndexed = getTransformStats(config.getId()) .getTransformsStats() @@ -120,7 +120,7 @@ public class TransformIT extends TransformIntegTestCase { .getIndexerStats() .getNumDocuments(); - DataFrameTransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); + TransformConfig 
storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT)); Instant now = Instant.now(); assertTrue("[create_time] is not before current time", storedConfig.getCreateTime().isBefore(now)); @@ -155,7 +155,7 @@ public class TransformIT extends TransformIntegTestCase { String id = "transform-to-update"; String dest = "reviews-by-user-business-day-to-update"; - DataFrameTransformConfig config = createTransformConfigBuilder(id, + TransformConfig config = createTransformConfigBuilder(id, groups, aggs, dest, @@ -169,7 +169,7 @@ public class TransformIT extends TransformIntegTestCase { waitUntilCheckpoint(config.getId(), 1L); assertThat(getTransformStats(config.getId()).getTransformsStats().get(0).getState(), - oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); + oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)); long docsIndexed = getTransformStats(config.getId()) .getTransformsStats() @@ -177,13 +177,13 @@ public class TransformIT extends TransformIntegTestCase { .getIndexerStats() .getNumDocuments(); - DataFrameTransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); + TransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0); assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT)); Instant now = Instant.now(); assertTrue("[create_time] is not before current time", storedConfig.getCreateTime().isBefore(now)); String pipelineId = "add_forty_two"; - DataFrameTransformConfigUpdate update = DataFrameTransformConfigUpdate.builder() + TransformConfigUpdate update = TransformConfigUpdate.builder() .setDescription("updated config") .setDest(DestConfig.builder().setIndex(dest).setPipeline(pipelineId).build()) .build(); diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java index a53c948b69c2..0322ec8d9873 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java @@ -16,22 +16,22 @@ import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.core.AcknowledgedResponse; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; -import org.elasticsearch.client.transform.DeleteDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformResponse; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse; -import org.elasticsearch.client.transform.PutDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformRequest; -import org.elasticsearch.client.transform.StartDataFrameTransformResponse; -import org.elasticsearch.client.transform.StopDataFrameTransformRequest; -import org.elasticsearch.client.transform.StopDataFrameTransformResponse; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import 
org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; +import org.elasticsearch.client.transform.DeleteTransformRequest; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformResponse; +import org.elasticsearch.client.transform.GetTransformStatsRequest; +import org.elasticsearch.client.transform.GetTransformStatsResponse; +import org.elasticsearch.client.transform.PutTransformRequest; +import org.elasticsearch.client.transform.StartTransformRequest; +import org.elasticsearch.client.transform.StartTransformResponse; +import org.elasticsearch.client.transform.StopTransformRequest; +import org.elasticsearch.client.transform.StopTransformResponse; +import org.elasticsearch.client.transform.UpdateTransformRequest; import org.elasticsearch.client.transform.transforms.DestConfig; import org.elasticsearch.client.transform.transforms.QueryConfig; import org.elasticsearch.client.transform.transforms.SourceConfig; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; import org.elasticsearch.client.transform.transforms.pivot.AggregationConfig; import org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource; import org.elasticsearch.client.transform.transforms.pivot.GroupConfig; @@ -70,7 +70,7 @@ import static org.hamcrest.core.Is.is; abstract class TransformIntegTestCase extends ESRestTestCase { - private Map transformConfigs = new HashMap<>(); + private Map transformConfigs = new HashMap<>(); protected void cleanUp() throws IOException { cleanUpTransforms(); @@ -78,54 +78,54 @@ abstract class TransformIntegTestCase extends ESRestTestCase { } protected void cleanUpTransforms() throws IOException { - for (DataFrameTransformConfig config : transformConfigs.values()) { + for (TransformConfig config : transformConfigs.values()) { stopTransform(config.getId()); deleteTransform(config.getId()); } transformConfigs.clear(); } - protected StopDataFrameTransformResponse stopTransform(String id) throws IOException { + protected StopTransformResponse stopTransform(String id) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); - return restClient.dataFrame().stopDataFrameTransform(new StopDataFrameTransformRequest(id, true, null), RequestOptions.DEFAULT); + return restClient.transform().stopTransform(new StopTransformRequest(id, true, null), RequestOptions.DEFAULT); } - protected StartDataFrameTransformResponse startTransform(String id, RequestOptions options) throws IOException { + protected StartTransformResponse startTransform(String id, RequestOptions options) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); - return restClient.dataFrame().startDataFrameTransform(new StartDataFrameTransformRequest(id), options); + return restClient.transform().startTransform(new StartTransformRequest(id), options); } protected AcknowledgedResponse deleteTransform(String id) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); AcknowledgedResponse response = - restClient.dataFrame().deleteDataFrameTransform(new DeleteDataFrameTransformRequest(id), RequestOptions.DEFAULT); + restClient.transform().deleteTransform(new DeleteTransformRequest(id), RequestOptions.DEFAULT); if (response.isAcknowledged()) { transformConfigs.remove(id); } return response; } 
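The renamed client classes compose as follows when creating a transform. A rough sketch: the pivot builder calls (`PivotConfig.builder()`, `GroupConfig.builder()`, `TermsGroupSource.builder()`) are recalled from the client API rather than shown verbatim in this diff, and the index names and transform id are illustrative.

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.core.AcknowledgedResponse;
    import org.elasticsearch.client.transform.PutTransformRequest;
    import org.elasticsearch.client.transform.transforms.DestConfig;
    import org.elasticsearch.client.transform.transforms.QueryConfig;
    import org.elasticsearch.client.transform.transforms.SourceConfig;
    import org.elasticsearch.client.transform.transforms.TransformConfig;
    import org.elasticsearch.client.transform.transforms.pivot.GroupConfig;
    import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
    import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource;
    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.search.aggregations.AggregationBuilders;
    import org.elasticsearch.search.aggregations.AggregatorFactories;

    class PutTransformSketch {
        static AcknowledgedResponse putReviewsTransform(RestHighLevelClient client) throws Exception {
            // Group by reviewer and average the star rating per group
            GroupConfig groups = GroupConfig.builder()
                .groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build())
                .build();
            AggregatorFactories.Builder aggs = AggregatorFactories.builder()
                .addAggregator(AggregationBuilders.avg("review_score").field("stars"));

            TransformConfig config = TransformConfig.builder()
                .setId("reviews-by-user")                 // illustrative id
                .setSource(SourceConfig.builder()
                    .setIndex("reviews")                  // illustrative source index
                    .setQueryConfig(new QueryConfig(QueryBuilders.matchAllQuery()))
                    .build())
                .setDest(DestConfig.builder().setIndex("reviews-by-user-dest").build())
                .setPivotConfig(PivotConfig.builder().setGroups(groups).setAggregations(aggs).build())
                .setDescription("example transform")
                .build();

            return client.transform().putTransform(new PutTransformRequest(config), RequestOptions.DEFAULT);
        }
    }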
- protected AcknowledgedResponse putTransform(DataFrameTransformConfig config, RequestOptions options) throws IOException { + protected AcknowledgedResponse putTransform(TransformConfig config, RequestOptions options) throws IOException { if (transformConfigs.keySet().contains(config.getId())) { throw new IllegalArgumentException("transform [" + config.getId() + "] is already registered"); } RestHighLevelClient restClient = new TestRestHighLevelClient(); AcknowledgedResponse response = - restClient.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(config), options); + restClient.transform().putTransform(new PutTransformRequest(config), options); if (response.isAcknowledged()) { transformConfigs.put(config.getId(), config); } return response; } - protected GetDataFrameTransformStatsResponse getTransformStats(String id) throws IOException { + protected GetTransformStatsResponse getTransformStats(String id) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); - return restClient.dataFrame().getDataFrameTransformStats(new GetDataFrameTransformStatsRequest(id), RequestOptions.DEFAULT); + return restClient.transform().getTransformStats(new GetTransformStatsRequest(id), RequestOptions.DEFAULT); } - protected GetDataFrameTransformResponse getTransform(String id) throws IOException { + protected GetTransformResponse getTransform(String id) throws IOException { RestHighLevelClient restClient = new TestRestHighLevelClient(); - return restClient.dataFrame().getDataFrameTransform(new GetDataFrameTransformRequest(id), RequestOptions.DEFAULT); + return restClient.transform().getTransform(new GetTransformRequest(id), RequestOptions.DEFAULT); } protected void waitUntilCheckpoint(String id, long checkpoint) throws Exception { @@ -195,21 +195,21 @@ abstract class TransformIntegTestCase extends ESRestTestCase { return builder.build(); } - protected DataFrameTransformConfig createTransformConfig(String id, - Map groups, - AggregatorFactories.Builder aggregations, - String destinationIndex, - String... sourceIndices) throws Exception { + protected TransformConfig createTransformConfig(String id, + Map groups, + AggregatorFactories.Builder aggregations, + String destinationIndex, + String... sourceIndices) throws Exception { return createTransformConfig(id, groups, aggregations, destinationIndex, QueryBuilders.matchAllQuery(), sourceIndices); } - protected DataFrameTransformConfig.Builder createTransformConfigBuilder(String id, - Map groups, - AggregatorFactories.Builder aggregations, - String destinationIndex, - QueryBuilder queryBuilder, - String... sourceIndices) throws Exception { - return DataFrameTransformConfig.builder() + protected TransformConfig.Builder createTransformConfigBuilder(String id, + Map groups, + AggregatorFactories.Builder aggregations, + String destinationIndex, + QueryBuilder queryBuilder, + String... sourceIndices) throws Exception { + return TransformConfig.builder() .setId(id) .setSource(SourceConfig.builder().setIndex(sourceIndices).setQueryConfig(createQueryConfig(queryBuilder)).build()) .setDest(DestConfig.builder().setIndex(destinationIndex).build()) @@ -218,12 +218,12 @@ abstract class TransformIntegTestCase extends ESRestTestCase { .setDescription("Test transform config id: " + id); } - protected DataFrameTransformConfig createTransformConfig(String id, - Map groups, - AggregatorFactories.Builder aggregations, - String destinationIndex, - QueryBuilder queryBuilder, - String... 
sourceIndices) throws Exception { + protected TransformConfig createTransformConfig(String id, + Map groups, + AggregatorFactories.Builder aggregations, + String destinationIndex, + QueryBuilder queryBuilder, + String... sourceIndices) throws Exception { return createTransformConfigBuilder(id, groups, aggregations, destinationIndex, queryBuilder, sourceIndices).build(); } @@ -233,9 +233,9 @@ abstract class TransformIntegTestCase extends ESRestTestCase { assertThat(response.buildFailureMessage(), response.hasFailures(), is(false)); } - protected void updateConfig(String id, DataFrameTransformConfigUpdate update) throws Exception { + protected void updateConfig(String id, TransformConfigUpdate update) throws Exception { RestHighLevelClient restClient = new TestRestHighLevelClient(); - restClient.dataFrame().updateDataFrameTransform(new UpdateDataFrameTransformRequest(update, id), RequestOptions.DEFAULT); + restClient.transform().updateTransform(new UpdateTransformRequest(update, id), RequestOptions.DEFAULT); } protected void createReviewsIndex(String indexName, int numDocs) throws Exception { diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformAuditorIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformAuditorIT.java index 3c433c48bdd5..01abbbae7d75 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformAuditorIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformAuditorIT.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.transform.integration; import org.elasticsearch.client.Request; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.junit.Before; import java.io.IOException; @@ -63,15 +63,15 @@ public class TransformAuditorIT extends TransformRestTestCase { startAndWaitForTransform(transformId, transformIndex, BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS); // Make sure we wrote to the audit - final Request request = new Request("GET", TransformInternalIndex.AUDIT_INDEX + "/_search"); + final Request request = new Request("GET", TransformInternalIndexConstants.AUDIT_INDEX + "/_search"); request.setJsonEntity("{\"query\":{\"term\":{\"transform_id\":\"simple_pivot_for_audit\"}}}"); assertBusy(() -> { - assertTrue(indexExists(TransformInternalIndex.AUDIT_INDEX)); + assertTrue(indexExists(TransformInternalIndexConstants.AUDIT_INDEX)); }); // Since calls to write the AbstractAuditor are sent and forgot (async) we could have returned from the start, // finished the job (as this is a very short DF job), all without the audit being fully written. 
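// Note: the index-name constants referenced here (AUDIT_INDEX, and
// LATEST_INDEX_NAME elsewhere) moved from the plugin-private
// org.elasticsearch.xpack.transform.persistence.TransformInternalIndex into
// TransformInternalIndexConstants under x-pack core, presumably so other
// modules can reference them without depending on the transform plugin.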
assertBusy(() -> { - refreshIndex(TransformInternalIndex.AUDIT_INDEX); + refreshIndex(TransformInternalIndexConstants.AUDIT_INDEX); Map response = entityAsMap(client().performRequest(request)); List hitList = ((List) ((Map)response.get("hits")).get("hits")); assertThat(hitList, is(not(empty()))); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformConfigurationIndexIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformConfigurationIndexIT.java index ad360c658164..4b4845ef52b3 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformConfigurationIndexIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformConfigurationIndexIT.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; @@ -42,13 +42,13 @@ public class TransformConfigurationIndexIT extends TransformRestTestCase { builder.endObject(); final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); Request req = new Request("PUT", - TransformInternalIndex.LATEST_INDEX_NAME + "/_doc/" + TransformConfig.documentId(fakeTransformName)); + TransformInternalIndexConstants.LATEST_INDEX_NAME + "/_doc/" + TransformConfig.documentId(fakeTransformName)); req.setEntity(entity); client().performRequest(req); } // refresh the index - assertOK(client().performRequest(new Request("POST", TransformInternalIndex.LATEST_INDEX_NAME + "/_refresh"))); + assertOK(client().performRequest(new Request("POST", TransformInternalIndexConstants.LATEST_INDEX_NAME + "/_refresh"))); Request deleteRequest = new Request("DELETE", TRANSFORM_ENDPOINT + fakeTransformName); Response deleteResponse = client().performRequest(deleteRequest); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java index 3d78c3e543aa..c962befb16ee 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformInternalIndexIT.java @@ -17,13 +17,13 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.SearchModule; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.client.indices.CreateIndexRequest; -import org.elasticsearch.client.transform.GetDataFrameTransformRequest; -import 
org.elasticsearch.client.transform.GetDataFrameTransformResponse; -import org.elasticsearch.client.transform.UpdateDataFrameTransformRequest; -import org.elasticsearch.client.transform.UpdateDataFrameTransformResponse; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfigUpdate; +import org.elasticsearch.client.transform.GetTransformRequest; +import org.elasticsearch.client.transform.GetTransformResponse; +import org.elasticsearch.client.transform.UpdateTransformRequest; +import org.elasticsearch.client.transform.UpdateTransformResponse; +import org.elasticsearch.client.transform.transforms.TransformConfigUpdate; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; @@ -42,8 +42,8 @@ import static org.hamcrest.Matchers.equalTo; public class TransformInternalIndexIT extends ESRestTestCase { - private static final String CURRENT_INDEX = TransformInternalIndex.LATEST_INDEX_NAME; - private static final String OLD_INDEX = TransformInternalIndex.INDEX_PATTERN + "1"; + private static final String CURRENT_INDEX = TransformInternalIndexConstants.LATEST_INDEX_NAME; + private static final String OLD_INDEX = TransformInternalIndexConstants.INDEX_PATTERN + "1"; public void testUpdateDeletesOldTransformConfig() throws Exception { @@ -88,12 +88,12 @@ public class TransformInternalIndexIT extends ESRestTestCase { RequestOptions.DEFAULT); assertThat(getResponse.isExists(), is(true)); - GetDataFrameTransformResponse response = client.dataFrame() - .getDataFrameTransform(new GetDataFrameTransformRequest(transformId), RequestOptions.DEFAULT); + GetTransformResponse response = client.transform() + .getTransform(new GetTransformRequest(transformId), RequestOptions.DEFAULT); assertThat(response.getTransformConfigurations().get(0).getId(), equalTo(transformId)); - UpdateDataFrameTransformResponse updated = client.dataFrame().updateDataFrameTransform( - new UpdateDataFrameTransformRequest(DataFrameTransformConfigUpdate.builder().setDescription("updated").build(), transformId), + UpdateTransformResponse updated = client.transform().updateTransform( + new UpdateTransformRequest(TransformConfigUpdate.builder().setDescription("updated").build(), transformId), RequestOptions.DEFAULT); assertThat(updated.getTransformConfiguration().getId(), equalTo(transformId)); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 372efddc57ca..59d5fe1c1cea 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.transform.TransformField; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.junit.After; import org.junit.AfterClass; @@ -384,7 +384,7 @@ public abstract class 
TransformRestTestCase extends ESRestTestCase { assertTrue(transformConfigs.isEmpty()); // the configuration index should be empty - Request request = new Request("GET", TransformInternalIndex.LATEST_INDEX_NAME + "/_search"); + Request request = new Request("GET", TransformInternalIndexConstants.LATEST_INDEX_NAME + "/_search"); try { Response searchResponse = adminClient().performRequest(request); Map searchResult = entityAsMap(searchResponse); diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java index b3b1dd7ccaff..f46c480e53c5 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/TransformUsageIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.junit.Before; import java.io.IOException; @@ -53,7 +53,7 @@ public class TransformUsageIT extends TransformRestTestCase { stopTransform("test_usage", false); Request statsExistsRequest = new Request("GET", - TransformInternalIndex.LATEST_INDEX_NAME+"/_search?q=" + + TransformInternalIndexConstants.LATEST_INDEX_NAME+"/_search?q=" + INDEX_DOC_TYPE.getPreferredName() + ":" + TransformStoredDoc.NAME); // Verify that we have one stat document @@ -95,7 +95,7 @@ public class TransformUsageIT extends TransformRestTestCase { XContentMapValues.extractValue("transform.stats." 
+ statName, statsMap)); } // Refresh the index so that statistics are searchable - refreshIndex(TransformInternalIndex.LATEST_INDEX_VERSIONED_NAME); + refreshIndex(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME); }, 60, TimeUnit.SECONDS); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 7209dc852909..70c33c373480 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -46,16 +46,17 @@ import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; import org.elasticsearch.xpack.core.transform.action.PutTransformAction; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.action.StopTransformAction; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.action.TransportDeleteTransformAction; -import org.elasticsearch.xpack.transform.action.TransportGetTransformsAction; -import org.elasticsearch.xpack.transform.action.TransportGetTransformsStatsAction; +import org.elasticsearch.xpack.transform.action.TransportGetTransformAction; +import org.elasticsearch.xpack.transform.action.TransportGetTransformStatsAction; import org.elasticsearch.xpack.transform.action.TransportPreviewTransformAction; import org.elasticsearch.xpack.transform.action.TransportPutTransformAction; import org.elasticsearch.xpack.transform.action.TransportStartTransformAction; @@ -63,8 +64,8 @@ import org.elasticsearch.xpack.transform.action.TransportStopTransformAction; import org.elasticsearch.xpack.transform.action.TransportUpdateTransformAction; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; +import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import org.elasticsearch.xpack.transform.rest.action.RestDeleteTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestGetTransformAction; import org.elasticsearch.xpack.transform.rest.action.RestGetTransformStatsAction; @@ -143,8 +144,8 @@ public class Transform extends Plugin implements ActionPlugin, PersistentTaskPlu new ActionHandler<>(StartTransformAction.INSTANCE, TransportStartTransformAction.class), new ActionHandler<>(StopTransformAction.INSTANCE, TransportStopTransformAction.class), new ActionHandler<>(DeleteTransformAction.INSTANCE, 
TransportDeleteTransformAction.class), - new ActionHandler<>(GetTransformsAction.INSTANCE, TransportGetTransformsAction.class), - new ActionHandler<>(GetTransformsStatsAction.INSTANCE, TransportGetTransformsStatsAction.class), + new ActionHandler<>(GetTransformAction.INSTANCE, TransportGetTransformAction.class), + new ActionHandler<>(GetTransformStatsAction.INSTANCE, TransportGetTransformStatsAction.class), new ActionHandler<>(PreviewTransformAction.INSTANCE, TransportPreviewTransformAction.class), new ActionHandler<>(UpdateTransformAction.INSTANCE, TransportUpdateTransformAction.class), usageAction, @@ -183,12 +184,13 @@ public class Transform extends Plugin implements ActionPlugin, PersistentTaskPlu public UnaryOperator> getIndexTemplateMetaDataUpgrader() { return templates -> { try { - templates.put(TransformInternalIndex.LATEST_INDEX_VERSIONED_NAME, TransformInternalIndex.getIndexTemplateMetaData()); + templates.put(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME, + TransformInternalIndex.getIndexTemplateMetaData()); } catch (IOException e) { logger.error("Error creating data frame index template", e); } try { - templates.put(TransformInternalIndex.AUDIT_INDEX, TransformInternalIndex.getAuditIndexTemplateMetaData()); + templates.put(TransformInternalIndexConstants.AUDIT_INDEX, TransformInternalIndex.getAuditIndexTemplateMetaData()); } catch (IOException e) { logger.warn("Error creating data frame audit index", e); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformInfoTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformInfoTransportAction.java index c940a1329e13..9250ea441f48 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformInfoTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformInfoTransportAction.java @@ -31,7 +31,7 @@ import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.util.ArrayList; import java.util.Arrays; @@ -110,7 +110,7 @@ public class TransformInfoTransportAction extends XPackInfoFeatureTransportActio .filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(), TransformStoredDoc.NAME))); - SearchRequestBuilder requestBuilder = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequestBuilder requestBuilder = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setSize(0) .setQuery(queryBuilder); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java index a88ed03d784c..3ef9de9ff0ee 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java @@ -37,7 +37,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; 
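// Everywhere in this change, index names previously read off the plugin-private
// TransformInternalIndex are now read off TransformInternalIndexConstants, which
// lives in x-pack core so that core and the transform plugin can share one
// definition. Reconstructed from the constant block this diff removes from
// TransformInternalIndex further below, the relocated class presumably looks
// roughly like this sketch (member order and the private constructor are
// assumptions):
package org.elasticsearch.xpack.core.transform.transforms.persistence;

public final class TransformInternalIndexConstants {

    // versioned internal index holding transform configs, stats and checkpoints
    public static final String INDEX_VERSION = "2";
    public static final String INDEX_PATTERN = ".data-frame-internal-";
    public static final String LATEST_INDEX_VERSIONED_NAME = INDEX_PATTERN + INDEX_VERSION;
    public static final String LATEST_INDEX_NAME = LATEST_INDEX_VERSIONED_NAME;
    public static final String INDEX_NAME_PATTERN = INDEX_PATTERN + "*";

    // audit (notifications) index written to by the TransformAuditor
    public static final String AUDIT_TEMPLATE_VERSION = "1";
    public static final String AUDIT_INDEX_PREFIX = ".data-frame-notifications-";
    public static final String AUDIT_INDEX = AUDIT_INDEX_PREFIX + AUDIT_TEMPLATE_VERSION;

    private TransformInternalIndexConstants() {
    }
}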
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.util.Arrays; import java.util.Collection; @@ -118,7 +118,7 @@ public class TransformUsageTransportAction extends XPackUsageFeatureTransportAct } ); - SearchRequest totalTransformCount = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest totalTransformCount = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setTrackTotalHits(true) .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(), TransformConfig.NAME)))) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformAction.java similarity index 81% rename from x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsAction.java rename to x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformAction.java index 9ffe5941ee58..6e6813aa22c6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformAction.java @@ -24,23 +24,23 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.AbstractTransportGetResourcesAction; import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.TransformMessages; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Request; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction.Response; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import static org.elasticsearch.xpack.core.transform.TransformField.INDEX_DOC_TYPE; -public class TransportGetTransformsAction extends AbstractTransportGetResourcesAction { +public class TransportGetTransformAction extends AbstractTransportGetResourcesAction { @Inject - public TransportGetTransformsAction(TransportService transportService, ActionFilters actionFilters, - Client client, NamedXContentRegistry xContentRegistry) { - super(GetTransformsAction.NAME, transportService, actionFilters, Request::new, client, xContentRegistry); + public TransportGetTransformAction(TransportService transportService, ActionFilters actionFilters, + Client client, NamedXContentRegistry xContentRegistry) { + super(GetTransformAction.NAME, transportService, actionFilters, Request::new, 
client, xContentRegistry); } @Override @@ -58,7 +58,7 @@ public class TransportGetTransformsAction extends AbstractTransportGetResourcesA @Override protected String[] getIndices() { - return new String[]{TransformInternalIndex.INDEX_NAME_PATTERN}; + return new String[]{TransformInternalIndexConstants.INDEX_NAME_PATTERN}; } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java similarity index 94% rename from x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsStatsAction.java rename to x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java index c2e6675345c3..3941fb62075e 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformsStatsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java @@ -23,14 +23,14 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Request; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction.Response; +import org.elasticsearch.xpack.core.transform.transforms.NodeAttributes; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformStats; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; -import org.elasticsearch.xpack.core.transform.transforms.NodeAttributes; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; import org.elasticsearch.xpack.transform.transforms.TransformTask; @@ -45,23 +45,23 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; -public class TransportGetTransformsStatsAction extends +public class TransportGetTransformStatsAction extends TransportTasksAction { + GetTransformStatsAction.Request, + GetTransformStatsAction.Response, + GetTransformStatsAction.Response> { - private static final Logger logger = LogManager.getLogger(TransportGetTransformsStatsAction.class); + private static final Logger logger = LogManager.getLogger(TransportGetTransformStatsAction.class); private final TransformConfigManager transformConfigManager; private final TransformCheckpointService transformCheckpointService; @Inject - public TransportGetTransformsStatsAction(TransportService transportService, ActionFilters actionFilters, - ClusterService clusterService, - TransformConfigManager transformsConfigManager, - TransformCheckpointService transformsCheckpointService) 
{ - super(GetTransformsStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, + public TransportGetTransformStatsAction(TransportService transportService, ActionFilters actionFilters, + ClusterService clusterService, + TransformConfigManager transformsConfigManager, + TransformCheckpointService transformsCheckpointService) { + super(GetTransformStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, Response::new, ThreadPool.Names.SAME); this.transformConfigManager = transformsConfigManager; this.transformCheckpointService = transformsCheckpointService; diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index f312b3385783..5526dd0c9f05 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -36,7 +36,7 @@ import org.elasticsearch.xpack.core.transform.action.StopTransformAction.Request import org.elasticsearch.xpack.core.transform.action.StopTransformAction.Response; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; import org.elasticsearch.xpack.transform.transforms.TransformTask; @@ -175,7 +175,7 @@ public class TransportStopTransformAction extends TransportTasksAction client.admin() .indices() - .prepareRefresh(TransformInternalIndex.LATEST_INDEX_NAME) + .prepareRefresh(TransformInternalIndexConstants.LATEST_INDEX_NAME) .execute(ActionListener.wrap( r -> listener.onResponse(waitResponse), e -> { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java index 055a3c02f8bb..162616feffe9 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/notifications/TransformAuditor.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.transform.notifications; import org.elasticsearch.client.Client; import org.elasticsearch.xpack.core.common.notifications.AbstractAuditor; import org.elasticsearch.xpack.core.transform.notifications.TransformAuditMessage; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; @@ -18,6 +18,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.TRANSFORM_ORIGIN; public class TransformAuditor extends AbstractAuditor { public TransformAuditor(Client client, String nodeName) { - super(client, nodeName, TransformInternalIndex.AUDIT_INDEX, TRANSFORM_ORIGIN, TransformAuditMessage::new); + super(client, nodeName, TransformInternalIndexConstants.AUDIT_INDEX, 
TRANSFORM_ORIGIN, TransformAuditMessage::new); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManager.java index 4bb4cbadd6a2..f2faf5085e40 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManager.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.core.transform.TransformMessages; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; import java.io.InputStream; @@ -111,7 +112,7 @@ public class TransformConfigManager { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = checkpoint.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); - IndexRequest indexRequest = new IndexRequest(TransformInternalIndex.LATEST_INDEX_NAME) + IndexRequest indexRequest = new IndexRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME) .opType(DocWriteRequest.OpType.INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .id(TransformCheckpoint.documentId(checkpoint.getTransformId(), checkpoint.getCheckpoint())) @@ -150,7 +151,7 @@ public class TransformConfigManager { public void updateTransformConfiguration(TransformConfig transformConfig, SeqNoPrimaryTermAndIndex seqNoPrimaryTermAndIndex, ActionListener listener) { - if (seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndex.LATEST_INDEX_NAME)) { + if (seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndexConstants.LATEST_INDEX_NAME)) { // update the config in the same, current index using optimistic concurrency control putTransformConfiguration(transformConfig, DocWriteRequest.OpType.INDEX, seqNoPrimaryTermAndIndex, listener); } else { @@ -167,9 +168,9 @@ public class TransformConfigManager { * @param listener listener to alert on completion */ public void deleteOldTransformConfigurations(String transformId, ActionListener listener) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(TransformInternalIndex.INDEX_NAME_PATTERN) + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() - .mustNot(QueryBuilders.termQuery("_index", TransformInternalIndex.LATEST_INDEX_NAME)) + .mustNot(QueryBuilders.termQuery("_index", TransformInternalIndexConstants.LATEST_INDEX_NAME)) .filter(QueryBuilders.termQuery("_id", TransformConfig.documentId(transformId))))) .setIndicesOptions(IndicesOptions.lenientExpandOpen()); @@ -194,9 +195,9 @@ public class TransformConfigManager { * @param listener listener to alert on completion */ public void deleteOldTransformStoredDocuments(String transformId, ActionListener listener) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(TransformInternalIndex.INDEX_NAME_PATTERN) + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN) 
.setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.boolQuery() - .mustNot(QueryBuilders.termQuery("_index", TransformInternalIndex.LATEST_INDEX_NAME)) + .mustNot(QueryBuilders.termQuery("_index", TransformInternalIndexConstants.LATEST_INDEX_NAME)) .filter(QueryBuilders.termQuery("_id", TransformStoredDoc.documentId(transformId))))) .setIndicesOptions(IndicesOptions.lenientExpandOpen()); @@ -221,7 +222,7 @@ public class TransformConfigManager { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = transformConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); - IndexRequest indexRequest = new IndexRequest(TransformInternalIndex.LATEST_INDEX_NAME) + IndexRequest indexRequest = new IndexRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME) .opType(optType) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .id(TransformConfig.documentId(transformConfig.getId())) @@ -260,7 +261,7 @@ public class TransformConfigManager { */ public void getTransformCheckpoint(String transformId, long checkpoint, ActionListener resultListener) { QueryBuilder queryBuilder = QueryBuilders.termQuery("_id", TransformCheckpoint.documentId(transformId, checkpoint)); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(queryBuilder) // use sort to get the last .addSort("_index", SortOrder.DESC) @@ -289,7 +290,7 @@ public class TransformConfigManager { */ public void getTransformConfiguration(String transformId, ActionListener resultListener) { QueryBuilder queryBuilder = QueryBuilders.termQuery("_id", TransformConfig.documentId(transformId)); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(queryBuilder) // use sort to get the last .addSort("_index", SortOrder.DESC) @@ -320,7 +321,7 @@ public class TransformConfigManager { ActionListener> configAndVersionListener) { QueryBuilder queryBuilder = QueryBuilders.termQuery("_id", TransformConfig.documentId(transformId)); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(queryBuilder) // use sort to get the last .addSort("_index", SortOrder.DESC) @@ -361,7 +362,7 @@ public class TransformConfigManager { String[] idTokens = ExpandedIdsMatcher.tokenizeExpression(transformIdsExpression); QueryBuilder queryBuilder = buildQueryFromTokenizedIds(idTokens, TransformConfig.NAME); - SearchRequest request = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest request = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .addSort(TransformField.ID.getPreferredName(), SortOrder.ASC) .setFrom(pageParams.getFrom()) .setTrackTotalHits(true) @@ -412,7 +413,7 @@ public class TransformConfigManager { DeleteByQueryRequest request = new DeleteByQueryRequest() .setAbortOnVersionConflict(false); //since these documents are not updated, a conflict just means it was deleted previously - request.indices(TransformInternalIndex.INDEX_NAME_PATTERN); + request.indices(TransformInternalIndexConstants.INDEX_NAME_PATTERN); QueryBuilder query = QueryBuilders.termQuery(TransformField.ID.getPreferredName(), transformId); 
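// The pattern running through TransformConfigManager: reads go against the
// wildcard INDEX_NAME_PATTERN sorted on "_index" descending so the copy in the
// newest versioned index wins, while writes always target LATEST_INDEX_NAME,
// applying optimistic concurrency control only when the document being replaced
// already lives in that latest index. A condensed sketch of the write-side
// rule, assuming the ES 7.x client types used in the surrounding hunks;
// LatestIndexWriteSketch and storedDocWriteRequest are hypothetical names:
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc;
import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants;
import org.elasticsearch.xpack.transform.persistence.SeqNoPrimaryTermAndIndex; // package assumed

class LatestIndexWriteSketch {
    static IndexRequest storedDocWriteRequest(String transformId,
                                              XContentBuilder source,
                                              SeqNoPrimaryTermAndIndex seqNoPrimaryTermAndIndex) {
        IndexRequest indexRequest = new IndexRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME)
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
            .id(TransformStoredDoc.documentId(transformId))
            .source(source);
        if (seqNoPrimaryTermAndIndex != null
            && seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndexConstants.LATEST_INDEX_NAME)) {
            // overwriting in the same index: fail on a concurrent modification
            // instead of silently clobbering the document
            indexRequest.opType(DocWriteRequest.OpType.INDEX)
                .setIfSeqNo(seqNoPrimaryTermAndIndex.getSeqNo())
                .setIfPrimaryTerm(seqNoPrimaryTermAndIndex.getPrimaryTerm());
        }
        // otherwise the stale copy sits in an older versioned index and is
        // cleaned up separately by the delete-by-query helpers above
        return indexRequest;
    }
}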
request.setQuery(query); request.setRefresh(true); @@ -440,12 +441,12 @@ public class TransformConfigManager { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder source = stats.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); - IndexRequest indexRequest = new IndexRequest(TransformInternalIndex.LATEST_INDEX_NAME) + IndexRequest indexRequest = new IndexRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .id(TransformStoredDoc.documentId(stats.getId())) .source(source); if (seqNoPrimaryTermAndIndex != null && - seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndex.LATEST_INDEX_NAME)) { + seqNoPrimaryTermAndIndex.getIndex().equals(TransformInternalIndexConstants.LATEST_INDEX_NAME)) { indexRequest.opType(DocWriteRequest.OpType.INDEX) .setIfSeqNo(seqNoPrimaryTermAndIndex.getSeqNo()) .setIfPrimaryTerm(seqNoPrimaryTermAndIndex.getPrimaryTerm()); @@ -471,7 +472,7 @@ public class TransformConfigManager { public void getTransformStoredDoc(String transformId, ActionListener> resultListener) { QueryBuilder queryBuilder = QueryBuilders.termQuery("_id", TransformStoredDoc.documentId(transformId)); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .setQuery(queryBuilder) // use sort to get the last .addSort("_index", SortOrder.DESC) @@ -507,7 +508,7 @@ public class TransformConfigManager { .filter(QueryBuilders.termsQuery(TransformField.ID.getPreferredName(), transformIds)) .filter(QueryBuilders.termQuery(TransformField.INDEX_DOC_TYPE.getPreferredName(), TransformStoredDoc.NAME))); - SearchRequest searchRequest = client.prepareSearch(TransformInternalIndex.INDEX_NAME_PATTERN) + SearchRequest searchRequest = client.prepareSearch(TransformInternalIndexConstants.INDEX_NAME_PATTERN) .addSort(TransformField.ID.getPreferredName(), SortOrder.ASC) .addSort("_index", SortOrder.DESC) .setQuery(builder) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java index 3d614b3da5ff..2d6437ede18d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndex.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; import java.util.Collections; @@ -45,7 +46,7 @@ public final class TransformInternalIndex { /* Changelog of internal index versions * - * Please list changes, increase the version if you are 1st in this release cycle + * Please list changes, increase the version in {@link TransformInternalIndexConstants} if you are 1st in this release cycle * * version 1 (7.2): initial * version 2 (7.4): cleanup, add config::version, config::create_time, checkpoint::timestamp, checkpoint::time_upper_bound, @@
-54,17 +55,6 @@ public final class TransformInternalIndex { * stats::exponential_avg_documents_processed */ - // constants for the index - public static final String INDEX_VERSION = "2"; - public static final String INDEX_PATTERN = ".data-frame-internal-"; - public static final String LATEST_INDEX_VERSIONED_NAME = INDEX_PATTERN + INDEX_VERSION; - public static final String LATEST_INDEX_NAME = LATEST_INDEX_VERSIONED_NAME; - public static final String INDEX_NAME_PATTERN = INDEX_PATTERN + "*"; - - public static final String AUDIT_TEMPLATE_VERSION = "1"; - public static final String AUDIT_INDEX_PREFIX = ".data-frame-notifications-"; - public static final String AUDIT_INDEX = AUDIT_INDEX_PREFIX + AUDIT_TEMPLATE_VERSION; - // constants for mappings public static final String DYNAMIC = "dynamic"; public static final String PROPERTIES = "properties"; @@ -82,8 +72,8 @@ public final class TransformInternalIndex { public static final String KEYWORD = "keyword"; public static IndexTemplateMetaData getIndexTemplateMetaData() throws IOException { - IndexTemplateMetaData transformTemplate = IndexTemplateMetaData.builder(LATEST_INDEX_VERSIONED_NAME) - .patterns(Collections.singletonList(LATEST_INDEX_VERSIONED_NAME)) + IndexTemplateMetaData transformTemplate = IndexTemplateMetaData.builder(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME) + .patterns(Collections.singletonList(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME)) .version(Version.CURRENT.id) .settings(Settings.builder() // the configurations are expected to be small @@ -95,8 +85,8 @@ public final class TransformInternalIndex { } public static IndexTemplateMetaData getAuditIndexTemplateMetaData() throws IOException { - IndexTemplateMetaData transformTemplate = IndexTemplateMetaData.builder(AUDIT_INDEX) - .patterns(Collections.singletonList(AUDIT_INDEX_PREFIX + "*")) + IndexTemplateMetaData transformTemplate = IndexTemplateMetaData.builder(TransformInternalIndexConstants.AUDIT_INDEX) + .patterns(Collections.singletonList(TransformInternalIndexConstants.AUDIT_INDEX_PREFIX + "*")) .version(Version.CURRENT.id) .settings(Settings.builder() // the audits are expected to be small @@ -318,7 +308,7 @@ public final class TransformInternalIndex { } public static boolean haveLatestVersionedIndexTemplate(ClusterState state) { - return state.getMetaData().getTemplates().containsKey(LATEST_INDEX_VERSIONED_NAME); + return state.getMetaData().getTemplates().containsKey(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME); } /** @@ -344,7 +334,7 @@ public final class TransformInternalIndex { try { IndexTemplateMetaData indexTemplateMetaData = getIndexTemplateMetaData(); BytesReference jsonMappings = new BytesArray(indexTemplateMetaData.mappings().get(SINGLE_MAPPING_NAME).uncompressed()); - PutIndexTemplateRequest request = new PutIndexTemplateRequest(LATEST_INDEX_VERSIONED_NAME) + PutIndexTemplateRequest request = new PutIndexTemplateRequest(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME) .patterns(indexTemplateMetaData.patterns()) .version(indexTemplateMetaData.version()) .settings(indexTemplateMetaData.settings()) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformAction.java index d5ddef41d0c6..c329500b2066 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformAction.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.transform.TransformField; -import org.elasticsearch.xpack.core.transform.action.GetTransformsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformAction; import static org.elasticsearch.xpack.core.transform.TransformField.ALLOW_NO_MATCH; @@ -26,7 +26,7 @@ public class RestGetTransformAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - GetTransformsAction.Request request = new GetTransformsAction.Request(); + GetTransformAction.Request request = new GetTransformAction.Request(); String id = restRequest.param(TransformField.ID.getPreferredName()); request.setResourceId(id); @@ -36,7 +36,7 @@ public class RestGetTransformAction extends BaseRestHandler { new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM), restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE))); } - return channel -> client.execute(GetTransformsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(GetTransformAction.INSTANCE, request, new RestToXContentListener<>(channel)); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformStatsAction.java index 7fc8d2ba6566..61b8d60b3cc4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformStatsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/rest/action/RestGetTransformStatsAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.transform.TransformField; -import org.elasticsearch.xpack.core.transform.action.GetTransformsStatsAction; +import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import static org.elasticsearch.xpack.core.transform.TransformField.ALLOW_NO_MATCH; @@ -27,14 +27,14 @@ public class RestGetTransformStatsAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(TransformField.ID.getPreferredName()); - GetTransformsStatsAction.Request request = new GetTransformsStatsAction.Request(id); + GetTransformStatsAction.Request request = new GetTransformStatsAction.Request(id); request.setAllowNoMatch(restRequest.paramAsBoolean(ALLOW_NO_MATCH.getPreferredName(), true)); if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) { request.setPageParams( new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM), restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE))); } - return channel -> client.execute(GetTransformsStatsAction.INSTANCE, request, + return channel -> client.execute(GetTransformStatsAction.INSTANCE, 
request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index 5616e828dd24..64b299182d26 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.core.transform.TransformField; import org.elasticsearch.xpack.core.transform.TransformMessages; import org.elasticsearch.xpack.core.transform.action.StartTransformAction; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformState; @@ -108,7 +109,7 @@ public class TransformPersistentTasksExecutor extends PersistentTasksExecutor unavailableIndices = new ArrayList<>(indices.length); for (String index : indices) { IndexRoutingTable routingTable = clusterState.getRoutingTable().index(index); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformSingleNodeTestCase.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformSingleNodeTestCase.java index 5cdbba8e1228..f505ca858688 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformSingleNodeTestCase.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/TransformSingleNodeTestCase.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.template.TemplateUtils; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.junit.Before; import java.util.Collection; @@ -32,7 +32,7 @@ public abstract class TransformSingleNodeTestCase extends ESSingleNodeTestCase { assertBusy(() -> { ClusterState state = client().admin().cluster().prepareState().get().getState(); assertTrue("Timed out waiting for the transform templates to be installed", TemplateUtils - .checkTemplateExistsAndVersionIsGTECurrentVersion(TransformInternalIndex.LATEST_INDEX_VERSIONED_NAME, state)); + .checkTemplateExistsAndVersionIsGTECurrentVersion(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME, state)); }); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManagerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManagerTests.java index c4f1f94144cb..cfc66532e651 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManagerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformConfigManagerTests.java @@ -26,6 +26,7 @@ import 
org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDocTests; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.TransformSingleNodeTestCase; import org.junit.Before; @@ -36,8 +37,8 @@ import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.transform.persistence.TransformInternalIndex.mappings; import static org.elasticsearch.xpack.transform.persistence.TransformConfigManager.TO_XCONTENT_PARAMS; +import static org.elasticsearch.xpack.transform.persistence.TransformInternalIndex.mappings; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -261,7 +262,7 @@ public class TransformConfigManagerTests extends TransformSingleNodeTestCase { String transformId = "transform_test_stored_doc_create_read_update"; TransformStoredDoc storedDocs = TransformStoredDocTests.randomTransformStoredDoc(transformId); - SeqNoPrimaryTermAndIndex firstIndex = new SeqNoPrimaryTermAndIndex(0, 1, TransformInternalIndex.LATEST_INDEX_NAME); + SeqNoPrimaryTermAndIndex firstIndex = new SeqNoPrimaryTermAndIndex(0, 1, TransformInternalIndexConstants.LATEST_INDEX_NAME); assertAsync(listener -> transformConfigManager.putOrUpdateTransformStoredDoc(storedDocs, null, listener), firstIndex, @@ -272,7 +273,7 @@ public class TransformConfigManagerTests extends TransformSingleNodeTestCase { null, null); - SeqNoPrimaryTermAndIndex secondIndex = new SeqNoPrimaryTermAndIndex(1, 1, TransformInternalIndex.LATEST_INDEX_NAME); + SeqNoPrimaryTermAndIndex secondIndex = new SeqNoPrimaryTermAndIndex(1, 1, TransformInternalIndexConstants.LATEST_INDEX_NAME); TransformStoredDoc updated = TransformStoredDocTests.randomTransformStoredDoc(transformId); assertAsync(listener -> transformConfigManager.putOrUpdateTransformStoredDoc(updated, firstIndex, listener), secondIndex, @@ -297,7 +298,7 @@ public class TransformConfigManagerTests extends TransformSingleNodeTestCase { List expectedDocs = new ArrayList<>(); for (int i=0; i transformConfigManager.putTransformConfiguration(transformConfig, listener), true, null, null); assertThat(client().get(new GetRequest(oldIndex).id(docId)).actionGet().isExists(), is(true)); - assertThat(client().get(new GetRequest(TransformInternalIndex.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), is(true)); + assertThat(client().get(new GetRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), + is(true)); assertAsync(listener -> transformConfigManager.deleteOldTransformConfigurations(transformId, listener), true, null, null); - client().admin().indices().refresh(new RefreshRequest(TransformInternalIndex.INDEX_NAME_PATTERN)).actionGet(); + client().admin().indices().refresh(new RefreshRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN)).actionGet(); assertThat(client().get(new GetRequest(oldIndex).id(docId)).actionGet().isExists(), is(false)); - assertThat(client().get(new GetRequest(TransformInternalIndex.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), is(true)); + assertThat(client().get(new 
GetRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), + is(true)); } public void testDeleteOldTransformStoredDocuments() throws Exception { - String oldIndex = TransformInternalIndex.INDEX_PATTERN + "1"; + String oldIndex = TransformInternalIndexConstants.INDEX_PATTERN + "1"; String transformId = "transform_test_delete_old_stored_documents"; String docId = TransformStoredDoc.documentId(transformId); TransformStoredDoc transformStoredDoc = TransformStoredDocTests @@ -369,22 +372,24 @@ public class TransformConfigManagerTests extends TransformSingleNodeTestCase { assertAsync(listener -> transformConfigManager.putOrUpdateTransformStoredDoc(transformStoredDoc, new SeqNoPrimaryTermAndIndex(3, 1, oldIndex), listener), - new SeqNoPrimaryTermAndIndex(0, 1, TransformInternalIndex.LATEST_INDEX_NAME), + new SeqNoPrimaryTermAndIndex(0, 1, TransformInternalIndexConstants.LATEST_INDEX_NAME), null, null); - client().admin().indices().refresh(new RefreshRequest(TransformInternalIndex.INDEX_NAME_PATTERN)).actionGet(); + client().admin().indices().refresh(new RefreshRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN)).actionGet(); assertThat(client().get(new GetRequest(oldIndex).id(docId)).actionGet().isExists(), is(true)); - assertThat(client().get(new GetRequest(TransformInternalIndex.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), is(true)); + assertThat(client().get(new GetRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), + is(true)); assertAsync(listener -> transformConfigManager.deleteOldTransformStoredDocuments(transformId, listener), true, null, null); - client().admin().indices().refresh(new RefreshRequest(TransformInternalIndex.INDEX_NAME_PATTERN)).actionGet(); + client().admin().indices().refresh(new RefreshRequest(TransformInternalIndexConstants.INDEX_NAME_PATTERN)).actionGet(); assertThat(client().get(new GetRequest(oldIndex).id(docId)).actionGet().isExists(), is(false)); - assertThat(client().get(new GetRequest(TransformInternalIndex.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), is(true)); + assertThat(client().get(new GetRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME).id(docId)).actionGet().isExists(), + is(true)); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java index 23249ca4fbf8..83f9b36c496a 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformInternalIndexTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; import java.io.UncheckedIOException; @@ -41,7 +42,7 @@ public class TransformInternalIndexTests extends ESTestCase { static { ImmutableOpenMap.Builder mapBuilder = ImmutableOpenMap.builder(); try { - mapBuilder.put(TransformInternalIndex.LATEST_INDEX_VERSIONED_NAME, TransformInternalIndex.getIndexTemplateMetaData()); + 
mapBuilder.put(TransformInternalIndexConstants.LATEST_INDEX_VERSIONED_NAME, TransformInternalIndex.getIndexTemplateMetaData()); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java index 528c49eb1dac..fc082a6cf394 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java @@ -195,7 +195,7 @@ public class TransformIndexerTests extends ESTestCase { when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); } - public void testPageSizeAdapt() throws InterruptedException { + public void testPageSizeAdapt() throws Exception { Integer pageSize = randomBoolean() ? null : randomIntBetween(500, 10_000); TransformConfig config = new TransformConfig(randomAlphaOfLength(10), randomSourceConfig(), @@ -232,8 +232,9 @@ public class TransformIndexerTests extends ESTestCase { assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); + latch.countDown(); - awaitBusy(() -> indexer.getState() == IndexerState.STOPPED); + assertBusy(() -> assertThat(indexer.getState(), equalTo(IndexerState.STARTED))); long pageSizeAfterFirstReduction = indexer.getPageSize(); assertThat(initialPageSize, greaterThan(pageSizeAfterFirstReduction)); assertThat(pageSizeAfterFirstReduction, greaterThan((long)TransformIndexer.MINIMUM_PAGE_SIZE)); @@ -245,8 +246,9 @@ public class TransformIndexerTests extends ESTestCase { assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); + secondRunLatch.countDown(); - awaitBusy(() -> indexer.getState() == IndexerState.STOPPED); + assertBusy(() -> assertThat(indexer.getState(), equalTo(IndexerState.STARTED))); // assert that page size has been reduced again assertThat(pageSizeAfterFirstReduction, greaterThan((long)indexer.getPageSize())); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java index 75bf84c51ae8..542ca1ba5822 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java @@ -31,9 +31,9 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; -import org.elasticsearch.xpack.transform.persistence.TransformInternalIndex; import 
org.elasticsearch.xpack.transform.persistence.TransformInternalIndexTests; import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; @@ -136,7 +136,7 @@ public class TransformPersistentTasksExecutorTests extends ESTestCase { metaData = new MetaData.Builder(cs.metaData()); routingTable = new RoutingTable.Builder(cs.routingTable()); - String indexToRemove = TransformInternalIndex.LATEST_INDEX_NAME; + String indexToRemove = TransformInternalIndexConstants.LATEST_INDEX_NAME; if (randomBoolean()) { routingTable.remove(indexToRemove); } else { @@ -158,8 +158,8 @@ public class TransformPersistentTasksExecutorTests extends ESTestCase { private void addIndices(MetaData.Builder metaData, RoutingTable.Builder routingTable) { List indices = new ArrayList<>(); - indices.add(TransformInternalIndex.AUDIT_INDEX); - indices.add(TransformInternalIndex.LATEST_INDEX_NAME); + indices.add(TransformInternalIndexConstants.AUDIT_INDEX); + indices.add(TransformInternalIndexConstants.LATEST_INDEX_NAME); for (String indexName : indices) { IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); indexMetaData.settings(Settings.builder() diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index e67512ee694c..375cfa2ef7ae 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; @@ -180,7 +179,7 @@ public class WatcherServiceTests extends ESTestCase { SearchHit[] hits = new SearchHit[count]; for (int i = 0; i < count; i++) { String id = String.valueOf(i); - SearchHit hit = new SearchHit(1, id, new Text("watch"), Collections.emptyMap()); + SearchHit hit = new SearchHit(1, id, Collections.emptyMap()); hit.version(1L); hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever", OriginalIndices.NONE)); hits[i] = hit; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java index 8824af4a8762..88d95c37cc4f 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java @@ -9,7 +9,6 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; @@ -76,7 +75,7 @@ public class CompareConditionSearchTests extends AbstractWatcherIntegrationTestC public 
void testExecuteAccessHits() throws Exception { CompareCondition condition = new CompareCondition("ctx.payload.hits.hits.0._score", CompareCondition.Op.EQ, 1, Clock.systemUTC()); - SearchHit hit = new SearchHit(0, "1", new Text("type"), null); + SearchHit hit = new SearchHit(0, "1", null); hit.score(1f); hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null, OriginalIndices.NONE)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index 13d46c4d97f2..f520dead2ee6 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -40,7 +40,6 @@ import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -80,7 +79,6 @@ import java.util.List; import java.util.Map; import static java.util.Collections.singleton; -import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -215,7 +213,7 @@ public class TriggeredWatchStoreTests extends ESTestCase { when(searchResponse1.getSuccessfulShards()).thenReturn(1); when(searchResponse1.getTotalShards()).thenReturn(1); BytesArray source = new BytesArray("{}"); - SearchHit hit = new SearchHit(0, "first_foo", new Text(SINGLE_MAPPING_NAME), null); + SearchHit hit = new SearchHit(0, "first_foo", null); hit.version(1L); hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE)); hit.sourceRef(source); @@ -229,7 +227,7 @@ public class TriggeredWatchStoreTests extends ESTestCase { }).when(client).execute(eq(SearchAction.INSTANCE), any(), any()); // First return a scroll response with a single hit and then with no hits - hit = new SearchHit(0, "second_foo", new Text(SINGLE_MAPPING_NAME), null); + hit = new SearchHit(0, "second_foo", null); hit.version(1L); hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE)); hit.sourceRef(source); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java index e58c5edcb194..380e8401f01a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/activate/ActivateWatchTests.java @@ -89,8 +89,10 @@ public class ActivateWatchTests extends AbstractWatcherIntegrationTestCase { long count1 = docCount(".watcher-history*", matchAllQuery()); refresh(); - //ensure no new watch history - awaitBusy(() -> count1 != docCount(".watcher-history*", matchAllQuery()), 5, TimeUnit.SECONDS); + // Ensure no new watch history. 
The condition ought to always return false, but if it returns true + // then we know that more history has been written. + boolean hasNewHistory = waitUntil(() -> count1 != docCount(".watcher-history*", matchAllQuery()), 5, TimeUnit.SECONDS); + assertFalse("Watcher should have stopped executing but new history found", hasNewHistory); // lets activate it again logger.info("Activating watch again"); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java index 6372823d36d9..1e1dd6050b48 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java @@ -178,10 +178,4 @@ public class TriggerServiceTests extends ESTestCase { newActions.add(actionWrapper); when(watch.actions()).thenReturn(newActions); } - - private void setTransform(Watch watch, String type) { - ExecutableTransform transform = mock(ExecutableTransform.class); - when(transform.type()).thenReturn(type); - when(watch.transform()).thenReturn(transform); - } -} \ No newline at end of file +} diff --git a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java index ec94af9b75fc..3535dd362461 100644 --- a/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java +++ b/x-pack/qa/evil-tests/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/SimpleKdcLdapServer.java @@ -35,6 +35,9 @@ import java.util.concurrent.TimeUnit; import javax.net.ServerSocketFactory; +import static org.elasticsearch.test.ESTestCase.assertBusy; +import static org.junit.Assert.assertTrue; + /** * Utility wrapper around Apache {@link SimpleKdcServer} backed by Unboundid * {@link InMemoryDirectoryServer}.
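The `awaitBusy` removals above and in the `SimpleKdcLdapServer` hunks below follow the two replacement idioms that `ESTestCase` provides: `waitUntil` when the caller needs the boolean outcome of polling a predicate, and `assertBusy` when the poll should simply retry an assertion until it passes or the timeout fails the test. A minimal sketch of both idioms, assuming a test class that extends `ESTestCase`; the `currentDocCount()` and `init()` helpers are hypothetical stand-ins:

    import java.util.concurrent.TimeUnit;

    import org.elasticsearch.test.ESTestCase;

    public class AwaitBusyMigrationTests extends ESTestCase {

        public void testPollingIdioms() throws Exception {
            long before = currentDocCount();

            // waitUntil polls the predicate until it returns true or the timeout
            // elapses, and reports the final result. Here the predicate is expected
            // to stay false for the whole five-second window, so a true result
            // means something was written unexpectedly.
            boolean changed = waitUntil(() -> currentDocCount() != before, 5, TimeUnit.SECONDS);
            assertFalse("doc count changed unexpectedly", changed);

            // assertBusy retries the enclosed assertion until it passes; if the
            // timeout expires first, the test fails with the assertion's own
            // message, so no manual boolean bookkeeping is needed.
            assertBusy(() -> assertTrue("failed to initialize", init()));
        }

        private long currentDocCount() {
            return 0; // hypothetical helper
        }

        private boolean init() {
            return true; // hypothetical helper
        }
    }
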
@@ -90,9 +93,7 @@ public class SimpleKdcLdapServer { AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { - if (ESTestCase.awaitBusy(() -> init()) == false) { - throw new IllegalStateException("could not initialize SimpleKdcLdapServer"); - } + assertBusy(() -> assertTrue("Failed to initialize SimpleKdcLdapServer", init())); return null; } }); @@ -218,7 +219,7 @@ public class SimpleKdcLdapServer { /** * Stop Simple Kdc Server - * + * * @throws PrivilegedActionException when privileged action threw exception */ public synchronized void stop() throws PrivilegedActionException { diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index de72a9c80cce..10aae750dd2c 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -1,13 +1,12 @@ import java.nio.file.Path import java.nio.file.Paths -import java.nio.file.Files apply plugin: 'elasticsearch.testclusters' apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test.fixtures' -testFixtures.useFixture ":test:fixtures:krb5kdc-fixture" +testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "peppa" dependencies { testCompile project(':x-pack:plugin:core') diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index 0ff52b7d3c5c..292a1c3ee0c9 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ b/x-pack/qa/oidc-op-tests/build.gradle @@ -13,7 +13,7 @@ dependencies { } testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') } -testFixtures.useFixture ":x-pack:test:idp-fixture" +testFixtures.useFixture ":x-pack:test:idp-fixture", "oidc-provider" String ephemeralPort; task setupPorts { diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 0f1d728e9b8a..f32b64e3ec0a 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -10,7 +10,7 @@ dependencies { } } -testFixtures.useFixture ":x-pack:test:idp-fixture" +testFixtures.useFixture ":x-pack:test:idp-fixture", "openldap" Project idpFixtureProject = xpackProject("test:idp-fixture") String outputDir = "${project.buildDir}/generated-resources/${project.name}" diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java index 64c3a785d14e..7e88c327127a 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java @@ -6,15 +6,18 @@ package org.elasticsearch.upgrades; import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.xpack.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.test.SecuritySettingsSourceField; import org.junit.Before; -import java.io.IOException; import java.util.Collection; import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; import static org.elasticsearch.xpack.test.SecuritySettingsSourceField.basicAuthHeaderValue; @@ -77,27 +80,31 @@ public 
abstract class AbstractUpgradeTestCase extends ESRestTestCase { } protected Collection<String> templatesToWaitFor() { - return Collections.singletonList("security-index-template"); + return Collections.emptyList(); } @Before public void setupForTests() throws Exception { - awaitBusy(() -> { - boolean success = true; - for (String template : templatesToWaitFor()) { - try { - final Request headRequest = new Request("HEAD", "_template/" + template); - headRequest.setOptions(allowTypesRemovalWarnings()); - final boolean exists = adminClient() - .performRequest(headRequest) - .getStatusLine().getStatusCode() == 200; - success &= exists; - logger.debug("template [{}] exists [{}]", template, exists); - } catch (IOException e) { - logger.warn("error calling template api", e); - } + final Collection<String> expectedTemplates = templatesToWaitFor(); + + if (expectedTemplates.isEmpty()) { + return; + } + + assertBusy(() -> { + final Request catRequest = new Request("GET", "_cat/templates?h=n&s=n"); + final Response catResponse = adminClient().performRequest(catRequest); + + final List<String> templates = Streams.readAllLines(catResponse.getEntity().getContent()); + + final List<String> missingTemplates = expectedTemplates.stream() + .filter(each -> templates.contains(each) == false) + .collect(Collectors.toList()); + + // While it's possible to use a Hamcrest matcher for this, the failure is much less legible. + if (missingTemplates.isEmpty() == false) { + fail("Some expected templates are missing: " + missingTemplates + ". The templates that exist are: " + templates + ""); } - return success; }); } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java index c689167946ff..a41cba32058e 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataFrameSurvivesUpgradeIT.java @@ -11,9 +11,9 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.core.IndexerState; -import org.elasticsearch.client.transform.GetDataFrameTransformStatsResponse; -import org.elasticsearch.client.transform.transforms.DataFrameTransformConfig; -import org.elasticsearch.client.transform.transforms.DataFrameTransformStats; +import org.elasticsearch.client.transform.GetTransformStatsResponse; +import org.elasticsearch.client.transform.transforms.TransformConfig; +import org.elasticsearch.client.transform.transforms.TransformStats; import org.elasticsearch.client.transform.transforms.DestConfig; import org.elasticsearch.client.transform.transforms.SourceConfig; import org.elasticsearch.client.transform.transforms.TimeSyncConfig; @@ -129,7 +129,7 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { totalDocsWrittenSum += docs * ENTITIES.size(); } long totalDocsWritten = totalDocsWrittenSum; - DataFrameTransformConfig config = DataFrameTransformConfig.builder() + TransformConfig config = TransformConfig.builder() .setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1))) .setPivotConfig(PivotConfig.builder() .setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("stars").field("stars"))) @@ -146,12 +146,12 @@ 
waitUntilAfterCheckpoint(CONTINUOUS_DATA_FRAME_ID, 0L); assertBusy(() -> { - DataFrameTransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); + TransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); assertThat(stateAndStats.getIndexerStats().getOutputDocuments(), equalTo((long)ENTITIES.size())); assertThat(stateAndStats.getIndexerStats().getNumDocuments(), equalTo(totalDocsWritten)); // Even if we get back to started, we may periodically get set back to `indexing` when triggered. // Though short lived due to no changes on the source indices, it could result in flaky test behavior - assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); + assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)); }, 120, TimeUnit.SECONDS); @@ -165,13 +165,13 @@ // A continuous data frame should automatically become started when it gets assigned to a node // if it was assigned to the node that was removed from the cluster assertBusy(() -> { - DataFrameTransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); - assertThat(stateAndStats.getState(), oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); + TransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); + assertThat(stateAndStats.getState(), oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)); }, 120, TimeUnit.SECONDS); - DataFrameTransformStats previousStateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); + TransformStats previousStateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); // Add a new user and write data to it // This is so we can have more reliable data counts, as writing to existing entities requires @@ -190,10 +190,10 @@ greaterThanOrEqualTo(docs + previousStateAndStats.getIndexerStats().getNumDocuments())), 120, TimeUnit.SECONDS); - DataFrameTransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); + TransformStats stateAndStats = getTransformStats(CONTINUOUS_DATA_FRAME_ID); assertThat(stateAndStats.getState(), - oneOf(DataFrameTransformStats.State.STARTED, DataFrameTransformStats.State.INDEXING)); + oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)); awaitWrittenIndexerState(CONTINUOUS_DATA_FRAME_ID, (responseBody) -> { Map<String, Object> indexerStats = (Map<String, Object>)((List<?>)XContentMapValues.extractValue("hits.hits._source.stats", responseBody)) @@ -245,7 +245,7 @@ }); } - private void putTransform(String id, DataFrameTransformConfig config) throws IOException { + private void putTransform(String id, TransformConfig config) throws IOException { final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + id); createDataframeTransformRequest.setJsonEntity(Strings.toString(config)); Response response = client().performRequest(createDataframeTransformRequest); @@ -270,7 +270,7 @@ assertEquals(200, response.getStatusLine().getStatusCode()); } - private DataFrameTransformStats getTransformStats(String id) throws IOException { + private TransformStats getTransformStats(String id) throws IOException { final Request getStats = new Request("GET", DATAFRAME_ENDPOINT + 
id + "/_stats"); Response response = client().performRequest(getStats); assertEquals(200, response.getStatusLine().getStatusCode()); @@ -278,7 +278,7 @@ public class DataFrameSurvivesUpgradeIT extends AbstractUpgradeTestCase { try (XContentParser parser = xContentType.xContent().createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, response.getEntity().getContent())) { - GetDataFrameTransformStatsResponse resp = GetDataFrameTransformStatsResponse.fromXContent(parser); + GetTransformStatsResponse resp = GetTransformStatsResponse.fromXContent(parser); assertThat(resp.getTransformsStats(), hasSize(1)); return resp.getTransformsStats().get(0); } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml index bac5a6a9e993..713a66a5ff60 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/80_data_frame_jobs_crud.yml @@ -2,16 +2,16 @@ "Test put batch data frame transforms on mixed cluster": - do: cluster.health: - index: "dataframe-transform-airline-data" + index: "transform-airline-data" wait_for_status: green timeout: 70s - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "mixed-simple-transform" body: > { - "source": { "index": "dataframe-transform-airline-data" }, + "source": { "index": "transform-airline-data" }, "dest": { "index": "mixed-simple-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, @@ -21,36 +21,36 @@ - match: { acknowledged: true } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "mixed-complex-transform" body: > { "source": { - "index": "dataframe-transform-airline-data", + "index": "transform-airline-data", "query": { "bool": { "filter": {"term": {"airline": "ElasticAir"}} @@ -73,7 +73,7 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } @@ -81,24 +81,24 @@ - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: 
"mixed-complex-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } @@ -108,16 +108,16 @@ "Test put continuous data frame transform on mixed cluster": - do: cluster.health: - index: "dataframe-transform-airline-data-cont" + index: "transform-airline-data-cont" wait_for_status: green timeout: 70s - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "mixed-simple-continuous-transform" body: > { - "source": { "index": "dataframe-transform-airline-data-cont" }, + "source": { "index": "transform-airline-data-cont" }, "dest": { "index": "mixed-simple-continuous-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, @@ -133,7 +133,7 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } @@ -143,24 +143,24 @@ - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } @@ -170,49 +170,49 @@ "Test GET, start, and stop old cluster batch transforms": - do: cluster.health: - index: "dataframe-transform-airline-data" + index: "transform-airline-data" wait_for_status: green timeout: 70s - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "old-simple-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-transform" 
wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "old-complex-transform-idx" } - match: { transforms.0.dest.pipeline: "data_frame_simple_pipeline" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } @@ -221,23 +221,23 @@ - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } @@ -247,12 +247,12 @@ "Test GET, stop, start, old continuous transforms": - do: cluster.health: - index: "dataframe-transform-airline-data-cont" + index: "transform-airline-data-cont" wait_for_status: green timeout: 70s - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } @@ -262,24 +262,24 @@ - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_data_frame_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_data_frame_jobs_crud.yml index bf2b1f6b939f..bc50362c0d8d 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_data_frame_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/80_data_frame_jobs_crud.yml @@ -2,7 +2,7 @@ "Test put batch 
data frame transforms on old cluster": - do: indices.create: - index: dataframe-transform-airline-data + index: transform-airline-data body: mappings: properties: @@ -16,7 +16,7 @@ type: integer - do: cluster.health: - index: "dataframe-transform-airline-data" + index: "transform-airline-data" wait_for_status: green timeout: 70s @@ -35,11 +35,11 @@ ] } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "old-simple-transform" body: > { - "source": { "index": "dataframe-transform-airline-data" }, + "source": { "index": "transform-airline-data" }, "dest": { "index": "old-simple-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, @@ -49,7 +49,7 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } @@ -57,34 +57,34 @@ - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "old-complex-transform" body: > { "source": { - "index": "dataframe-transform-airline-data", + "index": "transform-airline-data", "query": { "bool": { "filter": {"term": {"airline": "ElasticAir"}} @@ -107,29 +107,29 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } @@ -138,7 +138,7 @@ "Test put continuous data frame transform on old cluster": - do: indices.create: - index: dataframe-transform-airline-data-cont + index: transform-airline-data-cont body: mappings: properties: @@ -152,16 +152,16 @@ type: integer - do: cluster.health: - index: "dataframe-transform-airline-data-cont" + index: "transform-airline-data-cont" wait_for_status: green timeout: 70s - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "old-simple-continuous-transform" body: > { - "source": { "index": "dataframe-transform-airline-data-cont" }, + "source": { "index": 
"transform-airline-data-cont" }, "dest": { "index": "old-simple-continuous-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, @@ -177,7 +177,7 @@ - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } @@ -187,23 +187,23 @@ - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml index 4d9219962586..9f21fda1c0fa 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/80_data_frame_jobs_crud.yml @@ -9,43 +9,43 @@ setup: "Get start, stop, and delete old and mixed cluster batch data frame transforms": # Simple and complex OLD transforms - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "old-simple-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { 
transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "old-complex-transform-idx" } - match: { transforms.0.dest.pipeline: "data_frame_simple_pipeline" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } @@ -54,23 +54,23 @@ setup: - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "old-complex-transform" } @@ -78,44 +78,44 @@ setup: # Simple and complex Mixed cluster transforms - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "mixed-simple-transform-idx" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-simple-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-simple-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - - match: { transforms.0.source.index.0: "dataframe-transform-airline-data" } + - match: { transforms.0.source.index.0: "transform-airline-data" } - match: { transforms.0.dest.index: "mixed-complex-transform-idx" } - match: { transforms.0.dest.pipeline: "data_frame_simple_pipeline" } - match: { transforms.0.pivot.group_by.airline.terms.field: "airline" } @@ -124,23 +124,23 @@ setup: - match: { transforms.0.pivot.aggregations.avg_response.avg.field: "responsetime" } - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-complex-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-complex-transform" 
- match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } - match: { transforms.0.state: "/started|indexing|stopping|stopped/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-complex-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-complex-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-complex-transform" } @@ -148,22 +148,22 @@ setup: # Delete all old and mixed transforms - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "old-simple-transform" - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "mixed-simple-transform" - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-transform,mixed-simple-transform" - match: { count: 0 } --- "Test GET, stop, delete, old and mixed continuous transforms": - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } @@ -173,42 +173,42 @@ setup: - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "old-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "old-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "old-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "old-simple-continuous-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "old-simple-continuous-transform" - do: - data_frame.get_data_frame_transform: + transform.get_transform: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } @@ -218,49 +218,49 @@ setup: - is_true: transforms.0.create_time - do: - data_frame.start_data_frame_transform: + transform.start_transform: transform_id: "mixed-simple-continuous-transform" - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } - match: { transforms.0.state: "/started|indexing/" } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } - match: { transforms.0.state: 
"/started|indexing/" } - do: - data_frame.stop_data_frame_transform: + transform.stop_transform: transform_id: "mixed-simple-continuous-transform" wait_for_completion: true - match: { acknowledged: true } - do: - data_frame.get_data_frame_transform_stats: + transform.get_transform_stats: transform_id: "mixed-simple-continuous-transform" - match: { count: 1 } - match: { transforms.0.id: "mixed-simple-continuous-transform" } - match: { transforms.0.state: "stopped" } - do: - data_frame.delete_data_frame_transform: + transform.delete_transform: transform_id: "mixed-simple-continuous-transform" --- "Test index mappings for latest internal index": - do: - data_frame.put_data_frame_transform: + transform.put_transform: transform_id: "upgraded-simple-transform" defer_validation: true body: > { - "source": { "index": "dataframe-transform-airline-data" }, + "source": { "index": "transform-airline-data" }, "dest": { "index": "upgraded-simple-transform-idx" }, "pivot": { "group_by": { "airline": {"terms": {"field": "airline"}}}, diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java index ee98c38bc9cd..9a807ac9dfde 100644 --- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java +++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java @@ -21,7 +21,6 @@ public final class XPackRestTestConstants { // ML constants: public static final String ML_META_INDEX_NAME = ".ml-meta"; - public static final String AUDITOR_NOTIFICATIONS_INDEX = ".ml-notifications-000001"; public static final String CONFIG_INDEX = ".ml-config"; public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-"; public static final String STATE_INDEX_PREFIX = ".ml-state"; @@ -29,14 +28,13 @@ public final class XPackRestTestConstants { public static final List ML_POST_V660_TEMPLATES = List.of( - AUDITOR_NOTIFICATIONS_INDEX, ML_META_INDEX_NAME, STATE_INDEX_PREFIX, RESULTS_INDEX_PREFIX, CONFIG_INDEX); // Data Frame constants: - public static final String DATA_FRAME_INTERNAL_INDEX = ".data-frame-internal-1"; + public static final String DATA_FRAME_INTERNAL_INDEX = ".data-frame-internal-2"; public static final String DATA_FRAME_NOTIFICATIONS_INDEX = ".data-frame-notifications-1"; public static final List DATA_FRAME_TEMPLATES = List.of(DATA_FRAME_INTERNAL_INDEX, DATA_FRAME_NOTIFICATIONS_INDEX); diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java index 6ad16d512ef0..5db554088b1e 100644 --- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java +++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java @@ -5,22 +5,24 @@ */ package org.elasticsearch.xpack.test.rest; - import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; import org.elasticsearch.client.Request; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.TreeSet; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; -import static 
org.elasticsearch.test.rest.ESRestTestCase.allowTypesRemovalWarnings; +import static org.elasticsearch.test.ESTestCase.assertBusy; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.fail; public final class XPackRestTestHelper { @@ -31,50 +33,52 @@ public final class XPackRestTestHelper { * For each template name wait for the template to be created and * for the template version to be equal to the master node version. * - * @param client The rest client - * @param templateNames Names of the templates to wait for + * @param client The rest client + * @param expectedTemplates Names of the templates to wait for * @throws InterruptedException If the wait is interrupted */ - public static void waitForTemplates(RestClient client, List<String> templateNames) throws InterruptedException { + public static void waitForTemplates(RestClient client, List<String> expectedTemplates) throws Exception { AtomicReference<Version> masterNodeVersion = new AtomicReference<>(); - ESTestCase.awaitBusy(() -> { - String response; - try { - Request request = new Request("GET", "/_cat/nodes"); - request.addParameter("h", "master,version"); - response = EntityUtils.toString(client.performRequest(request).getEntity()); - } catch (IOException e) { - throw new RuntimeException(e); - } + + assertBusy(() -> { + Request request = new Request("GET", "/_cat/nodes"); + request.addParameter("h", "master,version"); + String response = EntityUtils.toString(client.performRequest(request).getEntity()); + for (String line : response.split("\n")) { if (line.startsWith("*")) { masterNodeVersion.set(Version.fromString(line.substring(2).trim())); - return true; + return; } } - return false; + fail("No master elected"); }); - for (String template : templateNames) { - ESTestCase.awaitBusy(() -> { - Map<String, Object> response; - try { - final Request getRequest = new Request("GET", "_template/" + template); - getRequest.setOptions(allowTypesRemovalWarnings()); - String string = EntityUtils.toString(client.performRequest(getRequest).getEntity()); - response = XContentHelper.convertToMap(JsonXContent.jsonXContent, string, false); - } catch (ResponseException e) { - if (e.getResponse().getStatusLine().getStatusCode() == 404) { - return false; - } - throw new RuntimeException(e); - } catch (IOException e) { - throw new RuntimeException(e); - } + assertBusy(() -> { + final Request request = new Request("GET", "_template"); + + String string = EntityUtils.toString(client.performRequest(request).getEntity()); + Map<String, Object> response = XContentHelper.convertToMap(JsonXContent.jsonXContent, string, false); + + final Set<String> templates = new TreeSet<>(response.keySet()); + + final List<String> missingTemplates = expectedTemplates.stream() + .filter(each -> templates.contains(each) == false) + .collect(Collectors.toList()); + + // While it's possible to use a Hamcrest matcher for this, the failure is much less legible. + if (missingTemplates.isEmpty() == false) { + fail("Some expected templates are missing: " + missingTemplates + ". 
The templates that exist are: " + templates + ""); } + + expectedTemplates.forEach(template -> { Map templateDefinition = (Map) response.get(template); - return Version.fromId((Integer) templateDefinition.get("version")).equals(masterNodeVersion.get()); + assertThat( + "Template [" + template + "] has unexpected version", + Version.fromId((Integer) templateDefinition.get("version")), + equalTo(masterNodeVersion.get())); }); - } + }); } public static String resultsWriteAlias(String jobId) { diff --git a/x-pack/qa/third-party/active-directory/build.gradle b/x-pack/qa/third-party/active-directory/build.gradle index 2d4af2b46bbc..b76b25b08eae 100644 --- a/x-pack/qa/third-party/active-directory/build.gradle +++ b/x-pack/qa/third-party/active-directory/build.gradle @@ -15,7 +15,7 @@ processTestResources { compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" -// we have to repeat these patterns because the security test resources are effectively in the src of this project +// we have to repeat these patterns because the security test resources are effectively in the src of this project forbiddenPatterns { exclude '**/*.key' exclude '**/*.p12' diff --git a/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.562.jar.sha1 b/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.562.jar.sha1 deleted file mode 100644 index ed8ded6a3608..000000000000 --- a/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b5fc47ec1b5afe180f5ebb4eda755acdca7a20ae \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.636.jar.sha1 b/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.636.jar.sha1 new file mode 100644 index 000000000000..b9ee9c102dbc --- /dev/null +++ b/x-pack/snapshot-tool/licenses/aws-java-sdk-core-1.11.636.jar.sha1 @@ -0,0 +1 @@ +84c9f180f8f60f6f1433c9c5253fcb704593b121 \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 b/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 deleted file mode 100644 index 8e852fe9b275..000000000000 --- a/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1712c878f7e9483ceac1eb2356a9457a3c8df03e \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 b/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 new file mode 100644 index 000000000000..1e05e98d240d --- /dev/null +++ b/x-pack/snapshot-tool/licenses/aws-java-sdk-s3-1.11.636.jar.sha1 @@ -0,0 +1 @@ +f86fc1993ac8122f6f02a8eb9b467b5f945cd76b \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/jmespath-java-1.11.562.jar.sha1 b/x-pack/snapshot-tool/licenses/jmespath-java-1.11.562.jar.sha1 deleted file mode 100644 index 8e2d0e1935a3..000000000000 --- a/x-pack/snapshot-tool/licenses/jmespath-java-1.11.562.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1147ed0ad1f2c5a16b8271e38e3cda5cd488c8ae \ No newline at end of file diff --git a/x-pack/snapshot-tool/licenses/jmespath-java-1.11.636.jar.sha1 b/x-pack/snapshot-tool/licenses/jmespath-java-1.11.636.jar.sha1 new file mode 100644 index 000000000000..70c0d3633af0 --- /dev/null +++ b/x-pack/snapshot-tool/licenses/jmespath-java-1.11.636.jar.sha1 @@ -0,0 +1 @@ +e468c349ce410171a1d5df7fa0fa377d52c5d651 \ No newline at end of file diff --git a/x-pack/snapshot-tool/qa/s3/build.gradle b/x-pack/snapshot-tool/qa/s3/build.gradle index 0ca2a171d48c..e9b727f4ae60 100644 --- a/x-pack/snapshot-tool/qa/s3/build.gradle +++ 
b/x-pack/snapshot-tool/qa/s3/build.gradle @@ -56,6 +56,8 @@ if (useS3Fixture) { apply plugin: 'elasticsearch.test.fixtures' + testFixtures.useFixture() + task writeDockerFile { File minioDockerfile = new File("${project.buildDir}/minio-docker/Dockerfile") outputs.file(minioDockerfile)
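The rewritten template checks in `AbstractUpgradeTestCase` and `XPackRestTestHelper` above share one shape: inside `assertBusy`, fetch the full template list once per retry, diff it against the expected names, and fail with a message that shows both sides. A condensed sketch of that shape, again assuming an `ESTestCase` subclass; `fetchTemplateNames()` is a hypothetical stand-in for the `_cat/templates` or `GET _template` call:

    import java.util.List;
    import java.util.Set;
    import java.util.TreeSet;
    import java.util.stream.Collectors;

    import org.elasticsearch.test.ESTestCase;

    public class TemplateWaitExample extends ESTestCase {

        public void waitForTemplates(List<String> expected) throws Exception {
            assertBusy(() -> {
                // One listing request per retry instead of one HEAD request per template.
                Set<String> actual = fetchTemplateNames();

                List<String> missing = expected.stream()
                    .filter(each -> actual.contains(each) == false)
                    .collect(Collectors.toList());

                // A plain fail() keeps the message legible: it reports both the
                // missing names and everything that actually exists.
                if (missing.isEmpty() == false) {
                    fail("Some expected templates are missing: " + missing
                        + ". The templates that exist are: " + actual);
                }
            });
        }

        private Set<String> fetchTemplateNames() {
            // Hypothetical: a real implementation would call _cat/templates or GET _template.
            return new TreeSet<>();
        }
    }

Diffing once and failing with both lists gives a single readable failure instead of a per-template timeout with no context.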