Use Java 15 text blocks for JSON and multiline strings (#80751)

The ES code base is quite JSON heavy. It uses a lot of multi-line JSON requests in tests, which need to be escaped and concatenated — and that, in turn, makes them hard to read. Let's try to leverage Java 15 text blocks for representing them.
This commit is contained in:
Artem Prigoda 2021-12-15 18:01:28 +01:00 committed by GitHub
parent ca01b5fe49
commit 763d6d510f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
730 changed files with 28591 additions and 25000 deletions

View file

@ -85,55 +85,46 @@ public class DependenciesGraphTask extends DefaultTask {
for (final Dependency dependency : runtimeDependencies) {
final String id = dependency.getGroup() + ":" + dependency.getName();
final String versionedId = id + "@" + dependency.getVersion();
final StringBuilder packageString = new StringBuilder();
final StringBuilder nodeString = new StringBuilder();
if (dependency instanceof ProjectDependency) {
continue;
}
packageString.append("{\"id\": \"")
.append(versionedId)
.append("\",\"info\": {\"name\": \"")
.append(id)
.append("\",\"version\": \"")
.append(dependency.getVersion())
.append("\"}}");
packages.add(packageString.toString());
nodeString.append("{\"nodeId\": \"")
.append(versionedId)
.append("\",\"pkgId\": \"")
.append(versionedId)
.append("\",\"deps\": []}");
packages.add("""
{"id": "%s","info": {"name": "%s","version": "%s"}}\
""".formatted(versionedId, id, dependency.getVersion()));
nodeString.append("""
{"nodeId": "%s","pkgId": "%s","deps": []}\
""".formatted(versionedId, versionedId));
nodes.add(nodeString.toString());
nodeIds.add("{\"nodeId\": \"" + versionedId + "\"}");
nodeIds.add("""
{"nodeId": "%s"}\
""".formatted(versionedId));
}
// We add one package and one node for each dependency, it suffices to check packages.
if (packages.size() > 0) {
final String projectName = "elastic/elasticsearch" + getProject().getPath();
final StringBuilder output = new StringBuilder();
output.append("{\"depGraph\": {\"schemaVersion\": \"1.2.0\",\"pkgManager\": {\"name\": \"gradle\"},\"pkgs\": [")
.append("{\"id\": \"")
.append(projectName)
.append("@0.0.0")
.append("\", \"info\": {\"name\": \"")
.append(projectName)
.append("\", \"version\": \"0.0.0\"}},")
.append(String.join(",", packages))
.append("],\"graph\": {\"rootNodeId\": \"")
.append(projectName)
.append("@0.0.0")
.append("\",\"nodes\": [")
.append("{\"nodeId\": \"")
.append(projectName)
.append("@0.0.0")
.append("\",\"pkgId\": \"")
.append(projectName)
.append("@0.0.0")
.append("\",\"deps\": [")
.append(String.join(",", nodeIds))
.append("]},")
.append(String.join(",", nodes))
.append("]}}}");
getLogger().debug("Dependency Graph: " + output.toString());
final String output = """
{
"depGraph": {
"schemaVersion": "1.2.0",
"pkgManager": {"name": "gradle"},
"pkgs": [
{
"id": "%s@0.0.0",
"info": {"name": "%1$s", "version": "0.0.0"}
},
%s
],
"graph": {
"rootNodeId": "%1$s@0.0.0",
"nodes": [
{ "nodeId": "%1$s@0.0.0","pkgId": "%1$s@0.0.0","deps": [%s] },
%s
]
}
}
}""".formatted(projectName, String.join(",", packages), String.join(",", nodeIds), String.join(",", nodes));
getLogger().debug("Dependency Graph: " + output);
try (CloseableHttpClient client = HttpClients.createDefault()) {
HttpPost postRequest = new HttpPost(url);
postRequest.addHeader("Authorization", "token " + token);

View file

@ -154,8 +154,11 @@ public abstract class DockerSupportService implements BuildService<DockerSupport
// Some other problem, print the error
final String message = String.format(
Locale.ROOT,
"a problem occurred while using Docker from [%s]%s yet it is required to run the following task%s: \n%s\n"
+ "the problem is that Docker exited with exit code [%d] with standard error output:\n%s",
"""
a problem occurred while using Docker from [%s]%s yet it is required to run the following task%s:
%s
the problem is that Docker exited with exit code [%d] with standard error output:
%s""",
availability.path,
availability.version == null ? "" : " v" + availability.version,
tasks.size() > 1 ? "s" : "",

View file

@ -318,16 +318,11 @@ public class DependencyLicensesTask extends DefaultTask {
String sha = getSha1(jar);
if (expectedSha.equals(sha) == false) {
final String exceptionMessage = String.format(
Locale.ROOT,
"SHA has changed! Expected %s for %s but got %s."
+ "\nThis usually indicates a corrupt dependency cache or artifacts changed upstream."
+ "\nEither wipe your cache, fix the upstream artifact, or delete %s and run updateShas",
expectedSha,
jarName,
sha,
shaFile
);
final String exceptionMessage = String.format(Locale.ROOT, """
SHA has changed! Expected %s for %s but got %s.
This usually indicates a corrupt dependency cache or artifacts changed upstream.
Either wipe your cache, fix the upstream artifact, or delete %s and run updateShas
""", expectedSha, jarName, sha, shaFile);
throw new GradleException(exceptionMessage);
}

View file

@ -22,68 +22,55 @@ public class LicenseAnalyzer {
*/
private static final LicenseMatcher[] matchers = new LicenseMatcher[] {
new LicenseMatcher("Apache-2.0", true, false, Pattern.compile("Apache.*License.*[vV]ersion.*2\\.0", Pattern.DOTALL)),
new LicenseMatcher(
"BSD-2-Clause",
true,
false,
Pattern.compile(
("Redistribution and use in source and binary forms, with or without\n"
+ "modification, are permitted provided that the following conditions\n"
+ "are met:\n"
+ "\n"
+ " 1\\. Redistributions of source code must retain the above copyright\n"
+ " notice, this list of conditions and the following disclaimer\\.\n"
+ " 2\\. Redistributions in binary form must reproduce the above copyright\n"
+ " notice, this list of conditions and the following disclaimer in the\n"
+ " documentation and/or other materials provided with the distribution\\.\n"
+ "\n"
+ "THIS SOFTWARE IS PROVIDED BY .+ (``|''|\")AS IS(''|\") AND ANY EXPRESS OR\n"
+ "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n"
+ "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\.\n"
+ "IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT,\n"
+ "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT\n"
+ "NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n"
+ "DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY\n"
+ "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n"
+ "\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF\n"
+ "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.").replaceAll("\\s+", "\\\\s*"),
Pattern.DOTALL
)
),
new LicenseMatcher(
"BSD-3-Clause",
true,
false,
Pattern.compile(
("\n"
+ "Redistribution and use in source and binary forms, with or without\n"
+ "modification, are permitted provided that the following conditions\n"
+ "are met:\n"
+ "\n"
+ " (1\\.)? Redistributions of source code must retain the above copyright\n"
+ " notice, this list of conditions and the following disclaimer\\.\n"
+ " (2\\.)? Redistributions in binary form must reproduce the above copyright\n"
+ " notice, this list of conditions and the following disclaimer in the\n"
+ " documentation and/or other materials provided with the distribution\\.\n"
+ " ((3\\.)? The name of .+ may not be used to endorse or promote products\n"
+ " derived from this software without specific prior written permission\\.|\n"
+ " (3\\.)? Neither the name of .+ nor the names of its\n"
+ " contributors may be used to endorse or promote products derived from\n"
+ " this software without specific prior written permission\\.)\n"
+ "\n"
+ "THIS SOFTWARE IS PROVIDED BY .+ (``|''|\")AS IS(''|\") AND ANY EXPRESS OR\n"
+ "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n"
+ "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\.\n"
+ "IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT,\n"
+ "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT\n"
+ "NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n"
+ "DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY\n"
+ "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n"
+ "\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF\n"
+ "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.\n").replaceAll("\\s+", "\\\\s*"),
Pattern.DOTALL
)
),
new LicenseMatcher("BSD-2-Clause", true, false, Pattern.compile(("""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1\\. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer\\.
2\\. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution\\.
THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\.
IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.""").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)),
new LicenseMatcher("BSD-3-Clause", true, false, Pattern.compile(("""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
(1\\.)? Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer\\.
(2\\.)? Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution\\.
((3\\.)? The name of .+ may not be used to endorse or promote products
derived from this software without specific prior written permission\\.|
(3\\.)? Neither the name of .+ nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission\\.)
THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\.
IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.
""").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)),
new LicenseMatcher(
"CDDL-1.0",
true,
@ -97,50 +84,39 @@ public class LicenseAnalyzer {
Pattern.compile("COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.1", Pattern.DOTALL)
),
new LicenseMatcher("ICU", true, false, Pattern.compile("ICU License - ICU 1.8.1 and later", Pattern.DOTALL)),
new LicenseMatcher(
"MIT",
true,
false,
Pattern.compile(
("\n"
+ "Permission is hereby granted, free of charge, to any person obtaining a copy of\n"
+ "this software and associated documentation files \\(the \"Software\"\\), to deal in\n"
+ "the Software without restriction, including without limitation the rights to\n"
+ "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\n"
+ "of the Software, and to permit persons to whom the Software is furnished to do\n"
+ "so, subject to the following conditions:\n"
+ "\n"
+ "The above copyright notice and this permission notice shall be included in all\n"
+ "copies or substantial portions of the Software\\.\n"
+ "\n"
+ "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n"
+ "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n"
+ "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. IN NO EVENT SHALL THE\n"
+ "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n"
+ "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n"
+ "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n"
+ "SOFTWARE\\.\n").replaceAll("\\s+", "\\\\s*"),
Pattern.DOTALL
)
),
new LicenseMatcher("MIT", true, false, Pattern.compile(("""
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files \\(the "Software"\\), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software\\.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE\\.
""").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)),
new LicenseMatcher(
"MIT-0",
true,
false,
Pattern.compile(
("MIT No Attribution\n"
+ "Copyright .+\n"
+ "\n"
+ "Permission is hereby granted, free of charge, to any person obtaining a copy of "
+ "this software and associated documentation files \\(the \"Software\"\\), to deal in the Software without "
+ "restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, "
+ "and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so.\n"
+ "\n"
+ "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, "
+ "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND "
+ "NONINFRINGEMENT\\. IN NO EVENT SHALL THE AUTHORS OR "
+ "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR "
+ "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n")
("""
MIT No Attribution
Copyright .+
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files \\(the "Software"\\), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
""")
.replaceAll("\\s+", "\\\\s*"),
Pattern.DOTALL
)
@ -152,17 +128,10 @@ public class LicenseAnalyzer {
new LicenseMatcher("EDL-1.0", true, false, Pattern.compile("Eclipse Distribution License - v 1.0", Pattern.DOTALL)),
new LicenseMatcher("LGPL-2.1", true, true, Pattern.compile("GNU LESSER GENERAL PUBLIC LICENSE.*Version 2.1", Pattern.DOTALL)),
new LicenseMatcher("LGPL-3.0", true, true, Pattern.compile("GNU LESSER GENERAL PUBLIC LICENSE.*Version 3", Pattern.DOTALL)),
new LicenseMatcher(
"GeoLite",
false,
false,
Pattern.compile(
("The Elastic GeoIP Database Service uses the GeoLite2 Data created "
+ "and licensed by MaxMind,\nwhich is governed by MaxMinds GeoLite2 End User License Agreement, "
+ "available at https://www.maxmind.com/en/geolite2/eula.\n").replaceAll("\\s+", "\\\\s*"),
Pattern.DOTALL
)
),
new LicenseMatcher("GeoLite", false, false, Pattern.compile(("""
The Elastic GeoIP Database Service uses the GeoLite2 Data created and licensed by MaxMind,
which is governed by MaxMinds GeoLite2 End User License Agreement, available at https://www.maxmind.com/en/geolite2/eula.
""").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)),
new LicenseMatcher(
"GeoIp-Database-Service",
false,

View file

@ -212,16 +212,14 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase {
public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception {
Request request = new Request("PUT", "/test-ro");
request.setJsonEntity(
"{\n"
+ " \"settings\" : {\n"
+ " \"index\" : {\n"
+ " \"blocks.write\" : true\n"
+ " }\n"
+ " }\n"
+ " \n"
+ "}"
);
request.setJsonEntity("""
{
"settings": {
"index": {
"blocks.write": true
}
}
}""");
Response response = client().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));

View file

@ -380,7 +380,8 @@ public class ClusterClientIT extends ESRestHighLevelClientTestCase {
public void testComponentTemplates() throws Exception {
String templateName = "my-template";
Settings settings = Settings.builder().put("index.number_of_shards", 1).build();
CompressedXContent mappings = new CompressedXContent("{\"properties\":{\"host_name\":{\"type\":\"keyword\"}}}");
CompressedXContent mappings = new CompressedXContent("""
{"properties":{"host_name":{"type":"keyword"}}}""");
AliasMetadata alias = AliasMetadata.builder("alias").writeIndex(true).build();
Template template = new Template(settings, mappings, Map.of("alias", alias));
ComponentTemplate componentTemplate = new ComponentTemplate(template, 1L, new HashMap<>());

View file

@ -180,7 +180,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync));
}
IndexRequest index = new IndexRequest("index").id("id");
index.source("{\"field1\":\"value1\",\"field2\":\"value2\"}", XContentType.JSON);
index.source("""
{"field1":"value1","field2":"value2"}""", XContentType.JSON);
index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
highLevelClient().index(index, RequestOptions.DEFAULT);
{
@ -205,7 +206,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
assertFalse(execute(getRequest, highLevelClient()::existsSource, highLevelClient()::existsSourceAsync));
}
IndexRequest index = new IndexRequest("index").id("id");
index.source("{\"field1\":\"value1\",\"field2\":\"value2\"}", XContentType.JSON);
index.source("""
{"field1":"value1","field2":"value2"}""", XContentType.JSON);
index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
highLevelClient().index(index, RequestOptions.DEFAULT);
{
@ -228,7 +230,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
assertFalse(execute(getRequest, highLevelClient()::existsSource, highLevelClient()::existsSourceAsync));
}
IndexRequest index = new IndexRequest("index").id("id");
index.source("{\"field1\":\"value1\",\"field2\":\"value2\"}", XContentType.JSON);
index.source("""
{"field1":"value1","field2":"value2"}""", XContentType.JSON);
index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
highLevelClient().index(index, RequestOptions.DEFAULT);
{
@ -283,7 +286,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
assertEquals("index", exception.getMetadata("es.index").get(0));
}
IndexRequest index = new IndexRequest("index").id("id");
String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}";
String document = """
{"field1":"value1","field2":"value2"}""";
index.source(document, XContentType.JSON);
index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
highLevelClient().index(index, RequestOptions.DEFAULT);
@ -423,7 +427,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
assertEquals("index", exception.getMetadata("es.index").get(0));
}
IndexRequest index = new IndexRequest("index").id("id");
String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}";
String document = """
{"field1":"value1","field2":"value2"}""";
index.source(document, XContentType.JSON);
index.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
highLevelClient().index(index, RequestOptions.DEFAULT);
@ -1073,7 +1078,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
{
// prepare : index docs
Settings settings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build();
String mappings = "\"properties\":{\"field\":{\"type\":\"text\"}}";
String mappings = """
"properties":{"field":{"type":"text"}}""";
createIndex(sourceIndex, settings, mappings);
assertEquals(
RestStatus.OK,
@ -1163,7 +1169,9 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
{
// prepare : index docs
Settings settings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build();
String mappings = "\"properties\":{\"field\":{\"type\":\"text\"}, \"field2\":{\"type\":\"text\"}}";
String mappings = """
"properties":{"field":{"type":"text"}, "field2":{"type":"text"}}
""";
createIndex(sourceIndex, settings, mappings);
assertEquals(
RestStatus.OK,

View file

@ -92,27 +92,28 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
}
public void testFromXContentWithElasticsearchException() throws IOException {
String xContent = "{"
+ " \"error\": {"
+ " \"root_cause\": ["
+ " {"
+ " \"type\": \"index_not_found_exception\","
+ " \"reason\": \"no such index [index]\","
+ " \"resource.type\": \"index_or_alias\","
+ " \"resource.id\": \"index\","
+ " \"index_uuid\": \"_na_\","
+ " \"index\": \"index\""
+ " }"
+ " ],"
+ " \"type\": \"index_not_found_exception\","
+ " \"reason\": \"no such index [index]\","
+ " \"resource.type\": \"index_or_alias\","
+ " \"resource.id\": \"index\","
+ " \"index_uuid\": \"_na_\","
+ " \"index\": \"index\""
+ " },"
+ " \"status\": 404"
+ "}";
String xContent = """
{
"error": {
"root_cause": [
{
"type": "index_not_found_exception",
"reason": "no such index [index]",
"resource.type": "index_or_alias",
"resource.id": "index",
"index_uuid": "_na_",
"index": "index"
}
],
"type": "index_not_found_exception",
"reason": "no such index [index]",
"resource.type": "index_or_alias",
"resource.id": "index",
"index_uuid": "_na_",
"index": "index"
},
"status": 404
}""";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) {
GetAliasesResponse getAliasesResponse = GetAliasesResponse.fromXContent(parser);
@ -126,7 +127,11 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
}
public void testFromXContentWithNoAliasFound() throws IOException {
String xContent = "{" + " \"error\": \"alias [aa] missing\"," + " \"status\": 404" + "}";
String xContent = """
{
"error": "alias [aa] missing",
"status": 404
}""";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) {
GetAliasesResponse getAliasesResponse = GetAliasesResponse.fromXContent(parser);
assertThat(getAliasesResponse.status(), equalTo(RestStatus.NOT_FOUND));
@ -136,15 +141,16 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
}
public void testFromXContentWithMissingAndFoundAlias() throws IOException {
String xContent = "{"
+ " \"error\": \"alias [something] missing\","
+ " \"status\": 404,"
+ " \"index\": {"
+ " \"aliases\": {"
+ " \"alias\": {}"
+ " }"
+ " }"
+ "}";
String xContent = """
{
"error": "alias [something] missing",
"status": 404,
"index": {
"aliases": {
"alias": {}
}
}
}""";
final String index = "index";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) {
GetAliasesResponse response = GetAliasesResponse.fromXContent(parser);

View file

@ -31,23 +31,28 @@ public class GraphIT extends ESRestHighLevelClientTestCase {
public void indexDocuments() throws IOException {
// Create chain of doc IDs across indices 1->2->3
Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/_doc/1");
doc1.setJsonEntity("{ \"num\":[1], \"const\":\"start\"}");
doc1.setJsonEntity("""
{ "num":[1], "const":"start"}""");
client().performRequest(doc1);
Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/1");
doc2.setJsonEntity("{\"num\":[1,2], \"const\":\"foo\"}");
doc2.setJsonEntity("""
{"num":[1,2], "const":"foo"}""");
client().performRequest(doc2);
Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/2");
doc3.setJsonEntity("{\"num\":[2,3], \"const\":\"foo\"}");
doc3.setJsonEntity("""
{"num":[2,3], "const":"foo"}""");
client().performRequest(doc3);
Request doc4 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/_doc/2");
doc4.setJsonEntity("{\"num\":\"string\", \"const\":\"foo\"}");
doc4.setJsonEntity("""
{"num":"string", "const":"foo"}""");
client().performRequest(doc4);
Request doc5 = new Request(HttpPut.METHOD_NAME, "/index_no_field_data/_doc/2");
doc5.setJsonEntity("{\"num\":[2,4], \"const\":\"foo\"}");
doc5.setJsonEntity("""
{"num":[2,4], "const":"foo"}""");
client().performRequest(doc5);
client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));

View file

@ -20,7 +20,8 @@ import static org.hamcrest.Matchers.equalTo;
public class HighLevelRestClientCompressionIT extends ESRestHighLevelClientTestCase {
private static final String GZIP_ENCODING = "gzip";
private static final String SAMPLE_DOCUMENT = "{\"name\":{\"first name\":\"Steve\",\"last name\":\"Jobs\"}}";
private static final String SAMPLE_DOCUMENT = """
{"name":{"first name":"Steve","last name":"Jobs"}}""";
public void testCompressesResponseIfRequested() throws IOException {
Request doc = new Request(HttpPut.METHOD_NAME, "/company/_doc/1");

View file

@ -18,7 +18,8 @@ import static org.hamcrest.Matchers.equalTo;
public class HighLevelRestClientFilterPathIT extends ESRestHighLevelClientTestCase {
private static final String SAMPLE_DOCUMENT = "{\"name\":{\"first name\":\"Steve\",\"last name\":\"Jobs\"}}";
private static final String SAMPLE_DOCUMENT = """
{"name":{"first name":"Steve","last name":"Jobs"}}""";
private static final String FILTER_PATH_PARAM = "filter_path";
private static final String FILTER_PATH_PARAM_VALUE = "-hits.hits._index,-hits.hits._type,-hits.hits.matched_queries";

View file

@ -357,7 +357,9 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
public void testGetIndex() throws IOException {
String indexName = "get_index_test";
Settings basicSettings = Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build();
String mappings = "\"properties\":{\"field-1\":{\"type\":\"integer\"}}";
String mappings = """
"properties":{"field-1":{"type":"integer"}}
""";
createIndex(indexName, basicSettings, mappings);
GetIndexRequest getIndexRequest = new GetIndexRequest(indexName).includeDefaults(false);
@ -376,7 +378,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
MappingMetadata mappingMetadata = getIndexResponse.getMappings().get(indexName);
assertNotNull(mappingMetadata);
assertEquals("_doc", mappingMetadata.type());
assertEquals("{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}", mappingMetadata.source().string());
assertEquals("""
{"properties":{"field-1":{"type":"integer"}}}""", mappingMetadata.source().string());
Object o = mappingMetadata.getSourceAsMap().get("properties");
assertThat(o, instanceOf(Map.class));
// noinspection unchecked
@ -390,7 +393,9 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
public void testGetIndexWithDefaults() throws IOException {
String indexName = "get_index_test";
Settings basicSettings = Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build();
String mappings = "\"properties\":{\"field-1\":{\"type\":\"integer\"}}";
String mappings = """
"properties":{"field-1":{"type":"integer"}}
""";
createIndex(indexName, basicSettings, mappings);
GetIndexRequest getIndexRequest = new GetIndexRequest(indexName).includeDefaults(true);
@ -520,7 +525,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
final GetFieldMappingsResponse.FieldMappingMetadata metadata = new GetFieldMappingsResponse.FieldMappingMetadata(
"field",
new BytesArray("{\"field\":{\"type\":\"text\"}}")
new BytesArray("""
{"field":{"type":"text"}}""")
);
assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metadata)));
}
@ -1067,7 +1073,9 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
assertEquals("test_new", rolloverResponse.getNewIndex());
}
{
String mappings = "{\"properties\":{\"field2\":{\"type\":\"keyword\"}}}";
String mappings = """
{"properties":{"field2":{"type":"keyword"}}}
""";
rolloverRequest.getCreateIndexRequest().mapping(mappings, XContentType.JSON);
rolloverRequest.dryRun(false);
rolloverRequest.addMaxIndexSizeCondition(new ByteSizeValue(1, ByteSizeUnit.MB));
@ -1514,18 +1522,16 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest("my-template", List.of("pattern-1", "name-*")).order(10)
.create(randomBoolean())
.settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0"))
.mapping(
"{"
+ " \"my_doc_type\": {"
+ " \"properties\": {"
+ " \"host_name\": {"
+ " \"type\": \"keyword\""
+ " }"
+ " }"
+ " }"
+ "}",
XContentType.JSON
)
.mapping("""
{
"my_doc_type": {
"properties": {
"host_name": {
"type": "keyword"
}
}
}
}""", XContentType.JSON)
.alias(new Alias("alias-1").indexRouting("abc"))
.alias(new Alias("{index}-write").searchRouting("xyz"));
@ -1607,17 +1613,16 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
createIndex(index, Settings.EMPTY);
Request postDoc = new Request(HttpPost.METHOD_NAME, "/" + index + "/_doc");
postDoc.setJsonEntity(
"{"
+ " \"type\": \"act\","
+ " \"line_id\": 1,"
+ " \"play_name\": \"Henry IV\","
+ " \"speech_number\": \"\","
+ " \"line_number\": \"\","
+ " \"speaker\": \"\","
+ " \"text_entry\": \"ACT I\""
+ "}"
);
postDoc.setJsonEntity("""
{
"type": "act",
"line_id": 1,
"play_name": "Henry IV",
"speech_number": "",
"line_number": "",
"speaker": "",
"text_entry": "ACT I"
}""");
assertOK(client().performRequest(postDoc));
QueryBuilder builder = QueryBuilders.queryStringQuery("line_id:foo").lenient(false);
@ -1860,7 +1865,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
public void testDataStreams() throws Exception {
String dataStreamName = "data-stream";
CompressedXContent mappings = new CompressedXContent("{\"properties\":{\"@timestamp\":{\"type\":\"date\"}}}");
CompressedXContent mappings = new CompressedXContent("""
{"properties":{"@timestamp":{"type":"date"}}}""");
Template template = new Template(null, mappings, null);
ComposableIndexTemplate indexTemplate = new ComposableIndexTemplate(
Collections.singletonList(dataStreamName),
@ -1952,7 +1958,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
public void testIndexTemplates() throws Exception {
String templateName = "my-template";
Settings settings = Settings.builder().put("index.number_of_shards", 1).build();
CompressedXContent mappings = new CompressedXContent("{\"properties\":{\"host_name\":{\"type\":\"keyword\"}}}");
CompressedXContent mappings = new CompressedXContent("""
{"properties":{"host_name":{"type":"keyword"}}}""");
AliasMetadata alias = AliasMetadata.builder("alias").writeIndex(true).build();
Template template = new Template(settings, mappings, Map.of("alias", alias));
List<String> pattern = List.of("pattern");
@ -2028,7 +2035,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
public void testSimulateIndexTemplate() throws Exception {
String templateName = "my-template";
Settings settings = Settings.builder().put("index.number_of_shards", 1).build();
CompressedXContent mappings = new CompressedXContent("{\"properties\":{\"host_name\":{\"type\":\"keyword\"}}}");
CompressedXContent mappings = new CompressedXContent("""
{"properties":{"host_name":{"type":"keyword"}}}""");
AliasMetadata alias = AliasMetadata.builder("alias").writeIndex(true).build();
Template template = new Template(settings, mappings, Map.of("alias", alias));
List<String> pattern = List.of("pattern");

View file

@ -826,14 +826,9 @@ public class IndicesRequestConvertersTests extends ESTestCase {
}
Map<String, String> expectedParams = new HashMap<>();
if (ESTestCase.randomBoolean()) {
putTemplateRequest.mapping(
"{ \"properties\": { \"field-"
+ ESTestCase.randomInt()
+ "\" : { \"type\" : \""
+ ESTestCase.randomFrom("text", "keyword")
+ "\" }}}",
XContentType.JSON
);
putTemplateRequest.mapping("""
{ "properties": { "field-%s" : { "type" : "%s" }}}
""".formatted(ESTestCase.randomInt(), ESTestCase.randomFrom("text", "keyword")), XContentType.JSON);
}
if (ESTestCase.randomBoolean()) {
putTemplateRequest.alias(new Alias("alias-" + ESTestCase.randomInt()));

View file

@ -82,29 +82,30 @@ public class IngestRequestConvertersTests extends ESTestCase {
public void testSimulatePipeline() throws IOException {
String pipelineId = ESTestCase.randomBoolean() ? "some_pipeline_id" : null;
boolean verbose = ESTestCase.randomBoolean();
String json = "{"
+ " \"pipeline\": {"
+ " \"description\": \"_description\","
+ " \"processors\": ["
+ " {"
+ " \"set\": {"
+ " \"field\": \"field2\","
+ " \"value\": \"_value\""
+ " }"
+ " }"
+ " ]"
+ " },"
+ " \"docs\": ["
+ " {"
+ " \"_index\": \"index\","
+ " \"_type\": \"_doc\","
+ " \"_id\": \"id\","
+ " \"_source\": {"
+ " \"foo\": \"rab\""
+ " }"
+ " }"
+ " ]"
+ "}";
String json = """
{
"pipeline": {
"description": "_description",
"processors": [
{
"set": {
"field": "field2",
"value": "_value"
}
}
]
},
"docs": [
{
"_index": "index",
"_type": "_doc",
"_id": "id",
"_source": {
"foo": "rab"
}
}
]
}""";
SimulatePipelineRequest request = new SimulatePipelineRequest(
new BytesArray(json.getBytes(StandardCharsets.UTF_8)),
XContentType.JSON

View file

@ -98,6 +98,7 @@ import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.client.ml.job.config.MlFilterTests;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
@ -204,10 +205,8 @@ public class MLRequestConvertersTests extends ESTestCase {
request = MLRequestConverters.closeJob(closeJobRequest);
assertEquals("/_ml/anomaly_detectors/" + jobId + ",otherjobs*/_close", request.getEndpoint());
assertEquals(
"{\"job_id\":\"somejobid,otherjobs*\",\"timeout\":\"10m\",\"force\":true,\"allow_no_match\":false}",
requestEntityToString(request)
);
assertEquals("""
{"job_id":"somejobid,otherjobs*","timeout":"10m","force":true,"allow_no_match":false}""", requestEntityToString(request));
}
public void testDeleteExpiredData() throws Exception {
@ -225,12 +224,13 @@ public class MLRequestConvertersTests extends ESTestCase {
String expectedPath = jobId == null ? "/_ml/_delete_expired_data" : "/_ml/_delete_expired_data/" + jobId;
assertEquals(expectedPath, request.getEndpoint());
if (jobId == null) {
assertEquals("{\"requests_per_second\":" + requestsPerSec + ",\"timeout\":\"1h\"}", requestEntityToString(request));
assertEquals("""
{"requests_per_second":%s,"timeout":"1h"}\
""".formatted(requestsPerSec), requestEntityToString(request));
} else {
assertEquals(
"{\"job_id\":\"" + jobId + "\",\"requests_per_second\":" + requestsPerSec + ",\"timeout\":\"1h\"}",
requestEntityToString(request)
);
assertEquals("""
{"job_id":"%s","requests_per_second":%s,"timeout":"1h"}\
""".formatted(jobId, requestsPerSec), requestEntityToString(request));
}
}
@ -270,13 +270,15 @@ public class MLRequestConvertersTests extends ESTestCase {
flushJobRequest.setAdvanceTime("100");
flushJobRequest.setCalcInterim(true);
request = MLRequestConverters.flushJob(flushJobRequest);
assertEquals(
"{\"job_id\":\""
+ jobId
+ "\",\"calc_interim\":true,\"start\":\"105\","
+ "\"end\":\"200\",\"advance_time\":\"100\",\"skip_time\":\"1000\"}",
requestEntityToString(request)
);
assertEquals(XContentHelper.stripWhitespace("""
{
"job_id": "%s",
"calc_interim": true,
"start": "105",
"end": "200",
"advance_time": "100",
"skip_time": "1000"
}""".formatted(jobId)), requestEntityToString(request));
}
public void testForecastJob() throws Exception {

View file

@ -96,27 +96,25 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase {
IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX);
double bucketScore = randomDoubleBetween(0.0, 100.0, true);
bucketStats.report(bucketScore);
indexRequest.source(
"{\"job_id\":\""
+ JOB_ID
+ "\", \"result_type\":\"bucket\", \"timestamp\": "
+ timestamp
+ ","
+ "\"bucket_span\": 3600,\"is_interim\": "
+ isInterim
+ ", \"anomaly_score\": "
+ bucketScore
+ ", \"bucket_influencers\":[{\"job_id\": \""
+ JOB_ID
+ "\", \"result_type\":\"bucket_influencer\", "
+ "\"influencer_field_name\": \"bucket_time\", \"timestamp\": "
+ timestamp
+ ", \"bucket_span\": 3600, "
+ "\"is_interim\": "
+ isInterim
+ "}]}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "%s",
"result_type": "bucket",
"timestamp": %s,
"bucket_span": 3600,
"is_interim": %s,
"anomaly_score": %s,
"bucket_influencers": [
{
"job_id": "%s",
"result_type": "bucket_influencer",
"influencer_field_name": "bucket_time",
"timestamp": %s,
"bucket_span": 3600,
"is_interim": %s
}
]
}""".formatted(JOB_ID, timestamp, isInterim, bucketScore, JOB_ID, timestamp, isInterim), XContentType.JSON);
bulkRequest.add(indexRequest);
}
@ -135,40 +133,32 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase {
double recordScore = randomDoubleBetween(0.0, 100.0, true);
recordStats.report(recordScore);
double p = randomDoubleBetween(0.0, 0.05, false);
indexRequest.source(
"{\"job_id\":\""
+ JOB_ID
+ "\", \"result_type\":\"record\", \"timestamp\": "
+ timestamp
+ ","
+ "\"bucket_span\": 3600,\"is_interim\": "
+ isInterim
+ ", \"record_score\": "
+ recordScore
+ ", \"probability\": "
+ p
+ "}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "%s",
"result_type": "record",
"timestamp": %s,
"bucket_span": 3600,
"is_interim": %s,
"record_score": %s,
"probability": %s
}""".formatted(JOB_ID, timestamp, isInterim, recordScore, p), XContentType.JSON);
bulkRequest.add(indexRequest);
}
private void addCategoryIndexRequest(long categoryId, String categoryName, BulkRequest bulkRequest) {
IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX);
indexRequest.source(
"{\"job_id\":\""
+ JOB_ID
+ "\", \"category_id\": "
+ categoryId
+ ", \"terms\": \""
+ categoryName
+ "\", \"regex\": \".*?"
+ categoryName
+ ".*\", \"max_matching_length\": 3, \"examples\": [\""
+ categoryName
+ "\"]}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "%s",
"category_id": %s,
"terms": "%s",
"regex": ".*?%s.*",
"max_matching_length": 3,
"examples": [
"%s"
]
}""".formatted(JOB_ID, categoryId, categoryName, categoryName, categoryName), XContentType.JSON);
bulkRequest.add(indexRequest);
}
@ -1114,20 +1104,17 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase {
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (Bucket bucket : firstBuckets) {
String anomalyScore = String.valueOf(bucket.getAnomalyScore() + 10.0);
IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX);
indexRequest.source(
"{\"job_id\":\""
+ anotherJobId
+ "\", \"result_type\":\"bucket\", \"timestamp\": "
+ bucket.getTimestamp().getTime()
+ ","
+ "\"bucket_span\": 3600,\"is_interim\": "
+ bucket.isInterim()
+ ", \"anomaly_score\": "
+ String.valueOf(bucket.getAnomalyScore() + 10.0)
+ "}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "%s",
"result_type": "bucket",
"timestamp": %s,
"bucket_span": 3600,
"is_interim": %s,
"anomaly_score": %s
}""".formatted(anotherJobId, bucket.getTimestamp().getTime(), bucket.isInterim(), anomalyScore), XContentType.JSON);
bulkRequest.add(indexRequest);
}
highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
@ -1317,22 +1304,18 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase {
double score = isLast ? 90.0 : 42.0;
IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX);
indexRequest.source(
"{\"job_id\":\""
+ JOB_ID
+ "\", \"result_type\":\"influencer\", \"timestamp\": "
+ timestamp
+ ","
+ "\"bucket_span\": 3600,\"is_interim\": "
+ isInterim
+ ", \"influencer_score\": "
+ score
+ ", "
+ "\"influencer_field_name\":\"my_influencer\", \"influencer_field_value\": \"inf_1\", \"probability\":"
+ randomDouble()
+ "}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "%s",
"result_type": "influencer",
"timestamp": %s,
"bucket_span": 3600,
"is_interim": %s,
"influencer_score": %s,
"influencer_field_name": "my_influencer",
"influencer_field_value": "inf_1",
"probability": %s
}""".formatted(JOB_ID, timestamp, isInterim, score, randomDouble()), XContentType.JSON);
bulkRequest.add(indexRequest);
timestamp += 3600000L;
}

View file

@ -2728,24 +2728,26 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
putTrainedModel(modelId);
}
String regressionPipeline = "{"
+ " \"processors\": [\n"
+ " {\n"
+ " \"inference\": {\n"
+ " \"target_field\": \"regression_value\",\n"
+ " \"model_id\": \""
+ modelIdPrefix
+ 0
+ "\",\n"
+ " \"inference_config\": {\"regression\": {}},\n"
+ " \"field_map\": {\n"
+ " \"col1\": \"col1\",\n"
+ " \"col2\": \"col2\",\n"
+ " \"col3\": \"col3\",\n"
+ " \"col4\": \"col4\"\n"
+ " }\n"
+ " }\n"
+ " }]}\n";
String regressionPipeline = """
{
"processors": [
{
"inference": {
"target_field": "regression_value",
"model_id": "%s%s",
"inference_config": {
"regression": {}
},
"field_map": {
"col1": "col1",
"col2": "col2",
"col3": "col3",
"col4": "col4"
}
}
}
]
}""".formatted(modelIdPrefix, "0");
String pipelineId = "regression-stats-pipeline";
highLevelClient().ingest()
@ -3116,25 +3118,30 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\""
+ jobId
+ "\", \"timestamp\":1541587919000, "
+ "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", "
+ "\"snapshot_id\":\""
+ snapshotId
+ "\", \"snapshot_doc_count\":1, \"model_size_stats\":{"
+ "\"job_id\":\""
+ jobId
+ "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, "
+ "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2,"
+ "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, "
+ "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000,"
+ "\"latest_result_time_stamp\":1519930800000, \"retain\":false, \"min_version\":\""
+ Version.CURRENT.toString()
+ "\"}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "%s",
"timestamp": 1541587919000,
"description": "State persisted due to job close at 2018-11-07T10:51:59+0000",
"snapshot_id": "%s",
"snapshot_doc_count": 1,
"model_size_stats": {
"job_id": "%s",
"result_type": "model_size_stats",
"model_bytes": 51722,
"total_by_field_count": 3,
"total_over_field_count": 0,
"total_partition_field_count": 2,
"bucket_allocation_failures_count": 0,
"memory_status": "ok",
"log_time": 1541587919000,
"timestamp": 1519930800000
},
"latest_record_time_stamp": 1519931700000,
"latest_result_time_stamp": 1519930800000,
"retain": false,
"min_version": "%s"
}""".formatted(jobId, snapshotId, jobId, Version.CURRENT.toString()), XContentType.JSON);
highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
}
@ -3147,27 +3154,34 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
String documentId = jobId + "_model_snapshot_" + snapshotId;
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\""
+ jobId
+ "\", \"timestamp\":1541587919000, "
+ "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", "
+ "\"snapshot_id\":\""
+ snapshotId
+ "\", \"snapshot_doc_count\":1, \"model_size_stats\":{"
+ "\"job_id\":\""
+ jobId
+ "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, "
+ "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2,"
+ "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, "
+ "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000,"
+ "\"latest_result_time_stamp\":1519930800000, \"retain\":false, "
+ "\"quantiles\":{\"job_id\":\""
+ jobId
+ "\", \"timestamp\":1541587919000, "
+ "\"quantile_state\":\"state\"}}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "%s",
"timestamp": 1541587919000,
"description": "State persisted due to job close at 2018-11-07T10:51:59+0000",
"snapshot_id": "%s",
"snapshot_doc_count": 1,
"model_size_stats": {
"job_id": "%s",
"result_type": "model_size_stats",
"model_bytes": 51722,
"total_by_field_count": 3,
"total_over_field_count": 0,
"total_partition_field_count": 2,
"bucket_allocation_failures_count": 0,
"memory_status": "ok",
"log_time": 1541587919000,
"timestamp": 1519930800000
},
"latest_record_time_stamp": 1519931700000,
"latest_result_time_stamp": 1519930800000,
"retain": false,
"quantiles": {
"job_id": "%s",
"timestamp": 1541587919000,
"quantile_state": "state"
}
}""".formatted(jobId, snapshotId, jobId, jobId), XContentType.JSON);
highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
}
}

View file

@ -41,14 +41,16 @@ public class RestHighLevelClientExtTests extends ESTestCase {
public void testParseEntityCustomResponseSection() throws IOException {
{
HttpEntity jsonEntity = new NStringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON);
HttpEntity jsonEntity = new NStringEntity("""
{"custom1":{ "field":"value"}}""", ContentType.APPLICATION_JSON);
BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent);
assertThat(customSection, instanceOf(CustomResponseSection1.class));
CustomResponseSection1 customResponseSection1 = (CustomResponseSection1) customSection;
assertEquals("value", customResponseSection1.value);
}
{
HttpEntity jsonEntity = new NStringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON);
HttpEntity jsonEntity = new NStringEntity("""
{"custom2":{ "array": ["item1", "item2"]}}""", ContentType.APPLICATION_JSON);
BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent);
assertThat(customSection, instanceOf(CustomResponseSection2.class));
CustomResponseSection2 customResponseSection2 = (CustomResponseSection2) customSection;

View file

@ -112,99 +112,112 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
public void indexDocuments() throws IOException {
{
Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/_doc/1");
doc1.setJsonEntity("{\"type\":\"type1\", \"id\":1, \"num\":10, \"num2\":50}");
doc1.setJsonEntity("""
{"type":"type1", "id":1, "num":10, "num2":50}""");
client().performRequest(doc1);
Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/_doc/2");
doc2.setJsonEntity("{\"type\":\"type1\", \"id\":2, \"num\":20, \"num2\":40}");
doc2.setJsonEntity("""
{"type":"type1", "id":2, "num":20, "num2":40}""");
client().performRequest(doc2);
Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/_doc/3");
doc3.setJsonEntity("{\"type\":\"type1\", \"id\":3, \"num\":50, \"num2\":35}");
doc3.setJsonEntity("""
{"type":"type1", "id":3, "num":50, "num2":35}""");
client().performRequest(doc3);
Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/_doc/4");
doc4.setJsonEntity("{\"type\":\"type2\", \"id\":4, \"num\":100, \"num2\":10}");
doc4.setJsonEntity("""
{"type":"type2", "id":4, "num":100, "num2":10}""");
client().performRequest(doc4);
Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/_doc/5");
doc5.setJsonEntity("{\"type\":\"type2\", \"id\":5, \"num\":100, \"num2\":10}");
doc5.setJsonEntity("""
{"type":"type2", "id":5, "num":100, "num2":10}""");
client().performRequest(doc5);
}
{
Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/_doc/1");
doc1.setJsonEntity("{\"id\":1, \"field\":\"value1\", \"rating\": 7}");
doc1.setJsonEntity("""
{"id":1, "field":"value1", "rating": 7}""");
client().performRequest(doc1);
Request doc2 = new Request(HttpPut.METHOD_NAME, "/index1/_doc/2");
doc2.setJsonEntity("{\"id\":2, \"field\":\"value2\"}");
doc2.setJsonEntity("""
{"id":2, "field":"value2"}""");
client().performRequest(doc2);
}
{
Request create = new Request("PUT", "/index2");
create.setJsonEntity(
"{"
+ " \"mappings\": {"
+ " \"properties\": {"
+ " \"rating\": {"
+ " \"type\": \"keyword\""
+ " }"
+ " }"
+ " }"
+ "}"
);
create.setJsonEntity("""
{
"mappings": {
"properties": {
"rating": {
"type": "keyword"
}
}
}
}""");
client().performRequest(create);
Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/3");
doc3.setJsonEntity("{\"id\":3, \"field\":\"value1\", \"rating\": \"good\"}");
doc3.setJsonEntity("""
{"id":3, "field":"value1", "rating": "good"}""");
client().performRequest(doc3);
Request doc4 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/4");
doc4.setJsonEntity("{\"id\":4, \"field\":\"value2\"}");
doc4.setJsonEntity("""
{"id":4, "field":"value2"}""");
client().performRequest(doc4);
}
{
Request doc5 = new Request(HttpPut.METHOD_NAME, "/index3/_doc/5");
doc5.setJsonEntity("{\"id\":5, \"field\":\"value1\"}");
doc5.setJsonEntity("""
{"id":5, "field":"value1"}""");
client().performRequest(doc5);
Request doc6 = new Request(HttpPut.METHOD_NAME, "/index3/_doc/6");
doc6.setJsonEntity("{\"id\":6, \"field\":\"value2\"}");
doc6.setJsonEntity("""
{"id":6, "field":"value2"}""");
client().performRequest(doc6);
}
{
Request create = new Request(HttpPut.METHOD_NAME, "/index4");
create.setJsonEntity(
"{"
+ " \"mappings\": {"
+ " \"properties\": {"
+ " \"field1\": {"
+ " \"type\": \"keyword\","
+ " \"store\": true"
+ " },"
+ " \"field2\": {"
+ " \"type\": \"keyword\","
+ " \"store\": true"
+ " }"
+ " }"
+ " }"
+ "}"
);
create.setJsonEntity("""
{
"mappings": {
"properties": {
"field1": {
"type": "keyword",
"store": true
},
"field2": {
"type": "keyword",
"store": true
}
}
}
}""");
client().performRequest(create);
Request doc1 = new Request(HttpPut.METHOD_NAME, "/index4/_doc/1");
doc1.setJsonEntity("{\"id\":1, \"field1\":\"value1\", \"field2\":\"value2\"}");
doc1.setJsonEntity("""
{"id":1, "field1":"value1", "field2":"value2"}""");
client().performRequest(doc1);
Request createFilteredAlias = new Request(HttpPost.METHOD_NAME, "/_aliases");
createFilteredAlias.setJsonEntity(
"{"
+ " \"actions\" : ["
+ " {"
+ " \"add\" : {"
+ " \"index\" : \"index4\","
+ " \"alias\" : \"alias4\","
+ " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }"
+ " }"
+ " }"
+ " ]"
+ "}"
);
createFilteredAlias.setJsonEntity("""
{
"actions": [
{
"add": {
"index": "index4",
"alias": "alias4",
"filter": {
"term": {
"field2": "value1"
}
}
}
}
]
}""");
client().performRequest(createFilteredAlias);
}
@ -493,68 +506,62 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
public void testSearchWithParentJoin() throws IOException {
final String indexName = "child_example";
Request createIndex = new Request(HttpPut.METHOD_NAME, "/" + indexName);
createIndex.setJsonEntity(
"{\n"
+ " \"mappings\": {\n"
+ " \"properties\" : {\n"
+ " \"qa_join_field\" : {\n"
+ " \"type\" : \"join\",\n"
+ " \"relations\" : { \"question\" : \"answer\" }\n"
+ " }\n"
+ " }\n"
+ " }"
+ "}"
);
createIndex.setJsonEntity("""
{
"mappings": {
"properties": {
"qa_join_field": {
"type": "join",
"relations": {
"question": "answer"
}
}
}
}
}""");
client().performRequest(createIndex);
Request questionDoc = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/_doc/1");
questionDoc.setJsonEntity(
"{\n"
+ " \"body\": \"<p>I have Windows 2003 server and i bought a new Windows 2008 server...\",\n"
+ " \"title\": \"Whats the best way to file transfer my site from server to a newer one?\",\n"
+ " \"tags\": [\n"
+ " \"windows-server-2003\",\n"
+ " \"windows-server-2008\",\n"
+ " \"file-transfer\"\n"
+ " ],\n"
+ " \"qa_join_field\" : \"question\"\n"
+ "}"
);
questionDoc.setJsonEntity("""
{
"body": "<p>I have Windows 2003 server and i bought a new Windows 2008 server...",
"title": "Whats the best way to file transfer my site from server to a newer one?",
"tags": [ "windows-server-2003", "windows-server-2008", "file-transfer" ],
"qa_join_field": "question"
}""");
client().performRequest(questionDoc);
Request answerDoc1 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/_doc/2");
answerDoc1.addParameter("routing", "1");
answerDoc1.setJsonEntity(
"{\n"
+ " \"owner\": {\n"
+ " \"location\": \"Norfolk, United Kingdom\",\n"
+ " \"display_name\": \"Sam\",\n"
+ " \"id\": 48\n"
+ " },\n"
+ " \"body\": \"<p>Unfortunately you're pretty much limited to FTP...\",\n"
+ " \"qa_join_field\" : {\n"
+ " \"name\" : \"answer\",\n"
+ " \"parent\" : \"1\"\n"
+ " },\n"
+ " \"creation_date\": \"2009-05-04T13:45:37.030\"\n"
+ "}"
);
answerDoc1.setJsonEntity("""
{
"owner": {
"location": "Norfolk, United Kingdom",
"display_name": "Sam",
"id": 48
},
"body": "<p>Unfortunately you're pretty much limited to FTP...",
"qa_join_field": {
"name": "answer",
"parent": "1"
},
"creation_date": "2009-05-04T13:45:37.030"
}""");
client().performRequest(answerDoc1);
Request answerDoc2 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/_doc/3");
answerDoc2.addParameter("routing", "1");
answerDoc2.setJsonEntity(
"{\n"
+ " \"owner\": {\n"
+ " \"location\": \"Norfolk, United Kingdom\",\n"
+ " \"display_name\": \"Troll\",\n"
+ " \"id\": 49\n"
+ " },\n"
+ " \"body\": \"<p>Use Linux...\",\n"
+ " \"qa_join_field\" : {\n"
+ " \"name\" : \"answer\",\n"
+ " \"parent\" : \"1\"\n"
+ " },\n"
+ " \"creation_date\": \"2009-05-05T13:45:37.030\"\n"
+ "}"
);
answerDoc2.setJsonEntity("""
{
"owner": {
"location": "Norfolk, United Kingdom",
"display_name": "Troll",
"id": 49
},
"body": "<p>Use Linux...",
"qa_join_field": {
"name": "answer",
"parent": "1"
},
"creation_date": "2009-05-05T13:45:37.030"
}""");
client().performRequest(answerDoc2);
client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
@ -948,7 +955,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
searchTemplateRequest.setRequest(new SearchRequest("index"));
searchTemplateRequest.setScriptType(ScriptType.INLINE);
searchTemplateRequest.setScript("{ \"query\": { \"match\": { \"num\": {{number}} } } }");
searchTemplateRequest.setScript("""
{ "query": { "match": { "num": {{number}} } } }""");
Map<String, Object> scriptParams = new HashMap<>();
scriptParams.put("number", 10);
@ -1037,7 +1045,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
SearchTemplateRequest goodRequest = new SearchTemplateRequest();
goodRequest.setRequest(new SearchRequest("index"));
goodRequest.setScriptType(ScriptType.INLINE);
goodRequest.setScript("{ \"query\": { \"match\": { \"num\": {{number}} } } }");
goodRequest.setScript("""
{ "query": { "match": { "num": {{number}} } } }""");
Map<String, Object> scriptParams = new HashMap<>();
scriptParams.put("number", 10);
goodRequest.setScriptParams(scriptParams);

View file

@ -20,13 +20,12 @@ import java.util.Locale;
public class TextStructureIT extends ESRestHighLevelClientTestCase {
public void testFindFileStructure() throws IOException {
String sample = "{\"logger\":\"controller\",\"timestamp\":1478261151445,\"level\":\"INFO\","
+ "\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 1\",\"class\":\"ml\","
+ "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n"
+ "{\"logger\":\"controller\",\"timestamp\":1478261151445,"
+ "\"level\":\"INFO\",\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 2\",\"class\":\"ml\","
+ "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n";
String sample = """
{"logger":"controller","timestamp":1478261151445,"level":"INFO","pid":42,"thread":"0x7fff7d2a8000","message":"message 1",\
"class":"ml","method":"core::SomeNoiseMaker","file":"Noisemaker.cc","line":333}
{"logger":"controller","timestamp":1478261151445,"level":"INFO","pid":42,"thread":"0x7fff7d2a8000","message":"message 2",\
"class":"ml","method":"core::SomeNoiseMaker","file":"Noisemaker.cc","line":333}
""";
TextStructureClient textStructureClient = highLevelClient().textStructure();

View file

@ -73,11 +73,12 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
assertThat(putWatchResponse.getVersion(), is(1L));
}
private static final String WATCH_JSON = "{ \n"
+ " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n"
+ " \"input\": { \"none\": {} },\n"
+ " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n"
+ "}";
private static final String WATCH_JSON = """
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "none": {} },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""";
private PutWatchResponse createWatch(String watchId) throws Exception {
BytesReference bytesReference = new BytesArray(WATCH_JSON);

View file

@ -247,11 +247,12 @@ public class WatcherRequestConvertersTests extends ESTestCase {
}
private static final String WATCH_JSON = "{ \n"
+ " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n"
+ " \"input\": { \"none\": {} },\n"
+ " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n"
+ "}";
private static final String WATCH_JSON = """
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "none": {} },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""";
public void testExecuteInlineWatchRequest() throws IOException {
boolean ignoreCondition = randomBoolean();

View file

@ -104,7 +104,8 @@ public class InferenceAggIT extends ESRestHighLevelClientTestCase {
private void indexData(String index) throws IOException {
CreateIndexRequest create = new CreateIndexRequest(index);
create.mapping("{\"properties\": {\"fruit\": {\"type\": \"keyword\"}," + "\"cost\": {\"type\": \"double\"}}}", XContentType.JSON);
create.mapping("""
{"properties": {"fruit": {"type": "keyword"}, "cost": {"type": "double"}}}""", XContentType.JSON);
highLevelClient().indices().create(create, RequestOptions.DEFAULT);
BulkRequest bulk = new BulkRequest(index).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
bulk.add(new IndexRequest().source(XContentType.JSON, "fruit", "apple", "cost", "1.2"));

View file

@ -145,11 +145,13 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
//tag::index-request-string
IndexRequest request = new IndexRequest("posts"); // <1>
request.id("1"); // <2>
String jsonString = "{" +
"\"user\":\"kimchy\"," +
"\"postDate\":\"2013-01-30\"," +
"\"message\":\"trying out Elasticsearch\"" +
"}";
String jsonString = """
{
"user": "kimchy",
"postDate": "2013-01-30",
"message": "trying out Elasticsearch"
}
""";
request.source(jsonString, XContentType.JSON); // <3>
//end::index-request-string
@ -787,17 +789,19 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
public void testReindex() throws Exception {
RestHighLevelClient client = highLevelClient();
{
String mapping = " \"properties\": {\n"
+ " \"user\": {\n"
+ " \"type\": \"text\"\n"
+ " },\n"
+ " \"field1\": {\n"
+ " \"type\": \"integer\"\n"
+ " },\n"
+ " \"field2\": {\n"
+ " \"type\": \"integer\"\n"
+ " }\n"
+ " }";
String mapping = """
"properties": {
"user": {
"type": "text"
},
"field1": {
"type": "integer"
},
"field2": {
"type": "integer"
}
}
""";
createIndex("source1", Settings.EMPTY, mapping);
createIndex("source2", Settings.EMPTY, mapping);
createPipeline("my_pipeline");
@ -987,17 +991,19 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
public void testUpdateByQuery() throws Exception {
RestHighLevelClient client = highLevelClient();
{
String mapping = " \"properties\": {\n"
+ " \"user\": {\n"
+ " \"type\": \"text\"\n"
+ " },\n"
+ " \"field1\": {\n"
+ " \"type\": \"integer\"\n"
+ " },\n"
+ " \"field2\": {\n"
+ " \"type\": \"integer\"\n"
+ " }\n"
+ " }";
String mapping = """
"properties": {
"user": {
"type": "text"
},
"field1": {
"type": "integer"
},
"field2": {
"type": "integer"
}
}
""";
createIndex("source1", Settings.EMPTY, mapping);
createIndex("source2", Settings.EMPTY, mapping);
createPipeline("my_pipeline");
@ -1110,17 +1116,18 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
public void testDeleteByQuery() throws Exception {
RestHighLevelClient client = highLevelClient();
{
String mapping = " \"properties\": {\n"
+ " \"user\": {\n"
+ " \"type\": \"text\"\n"
+ " },\n"
+ " \"field1\": {\n"
+ " \"type\": \"integer\"\n"
+ " },\n"
+ " \"field2\": {\n"
+ " \"type\": \"integer\"\n"
+ " }\n"
+ " }";
String mapping = """
"properties": {
"user": {
"type": "text"
},
"field1": {
"type": "integer"
},
"field2": {
"type": "integer"
}
}""";
createIndex("source1", Settings.EMPTY, mapping);
createIndex("source2", Settings.EMPTY, mapping);
}
@ -1222,18 +1229,17 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
Request createIndex = new Request("PUT", "/posts");
createIndex.setJsonEntity(
"{\n"
+ " \"mappings\" : {\n"
+ " \"properties\" : {\n"
+ " \"message\" : {\n"
+ " \"type\": \"text\",\n"
+ " \"store\": true\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}"
);
createIndex.setJsonEntity("""
{
"mappings": {
"properties": {
"message": {
"type": "text",
"store": true
}
}
}
}""");
Response response = client().performRequest(createIndex);
assertEquals(200, response.getStatusLine().getStatusCode());
@ -1394,18 +1400,17 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
Request createIndex = new Request("PUT", "/posts");
createIndex.setJsonEntity(
"{\n"
+ " \"mappings\" : {\n"
+ " \"properties\" : {\n"
+ " \"message\" : {\n"
+ " \"type\": \"text\",\n"
+ " \"store\": true\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}"
);
createIndex.setJsonEntity("""
{
"mappings": {
"properties": {
"message": {
"type": "text",
"store": true
}
}
}
}""");
Response response = client().performRequest(createIndex);
assertEquals(200, response.getStatusLine().getStatusCode());
@ -1845,18 +1850,17 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
{
Request createIndex = new Request("PUT", "/index");
createIndex.setJsonEntity(
"{\n"
+ " \"mappings\" : {\n"
+ " \"properties\" : {\n"
+ " \"foo\" : {\n"
+ " \"type\": \"text\",\n"
+ " \"store\": true\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}"
);
createIndex.setJsonEntity("""
{
"mappings": {
"properties": {
"foo": {
"type": "text",
"store": true
}
}
}
}""");
Response response = client().performRequest(createIndex);
assertEquals(200, response.getStatusLine().getStatusCode());
}

View file

@ -547,13 +547,14 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
.put("index.number_of_shards", 3)
.put("index.number_of_replicas", 1)
.build();
String mappingJson = "{\n" +
" \"properties\": {\n" +
" \"message\": {\n" +
" \"type\": \"text\"\n" +
" }\n" +
" }\n" +
"}";
String mappingJson = """
{
"properties": {
"message": {
"type": "text"
}
}
}""";
AliasMetadata twitterAlias = AliasMetadata.builder("twitter_alias").build();
Template template = new Template(settings, new CompressedXContent(mappingJson), Map.of("twitter_alias", twitterAlias)); // <2>
@ -626,13 +627,14 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
PutComponentTemplateRequest request = new PutComponentTemplateRequest().name("ct1");
Settings settings = Settings.builder().put("index.number_of_shards", 3).put("index.number_of_replicas", 1).build();
String mappingJson = "{\n"
+ " \"properties\": {\n"
+ " \"message\": {\n"
+ " \"type\": \"text\"\n"
+ " }\n"
+ " }\n"
+ "}";
String mappingJson = """
{
"properties": {
"message": {
"type": "text"
}
}
}""";
AliasMetadata twitterAlias = AliasMetadata.builder("twitter_alias").build();
Template template = new Template(settings, new CompressedXContent(mappingJson), Map.of("twitter_alias", twitterAlias));

View file

@ -33,11 +33,13 @@ public class GraphDocumentationIT extends ESRestHighLevelClientTestCase {
public void indexDocuments() throws IOException {
// Create chain of doc IDs across indices 1->2->3
Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/_doc/1");
doc1.setJsonEntity("{ \"participants\":[1,2], \"text\":\"let's start projectx\", \"attachment_md5\":\"324FHDGHFDG4564\"}");
doc1.setJsonEntity("""
{"participants":[1,2], "text":"let's start projectx", "attachment_md5":"324FHDGHFDG4564"}""");
client().performRequest(doc1);
Request doc2 = new Request(HttpPut.METHOD_NAME, "/index2/_doc/2");
doc2.setJsonEntity("{\"participants\":[2,3,4], \"text\":\"got something you both may be interested in\"}");
doc2.setJsonEntity("""
{"participants":[2,3,4], "text":"got something you both may be interested in"}""");
client().performRequest(doc2);
client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));

View file

@ -308,13 +308,14 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
{
// tag::create-index-request-mappings
request.mapping(// <1>
"{\n" +
" \"properties\": {\n" +
" \"message\": {\n" +
" \"type\": \"text\"\n" +
" }\n" +
" }\n" +
"}", // <2>
"""
{
"properties": {
"message": {
"type": "text"
}
}
}""", // <2>
XContentType.JSON);
// end::create-index-request-mappings
CreateIndexResponse createIndexResponse = client.indices().create(request, RequestOptions.DEFAULT);
@ -380,20 +381,21 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
request = new CreateIndexRequest("twitter6");
// tag::create-index-whole-source
request.source("{\n" +
" \"settings\" : {\n" +
" \"number_of_shards\" : 1,\n" +
" \"number_of_replicas\" : 0\n" +
" },\n" +
" \"mappings\" : {\n" +
" \"properties\" : {\n" +
" \"message\" : { \"type\" : \"text\" }\n" +
" }\n" +
" },\n" +
" \"aliases\" : {\n" +
" \"twitter_alias\" : {}\n" +
" }\n" +
"}", XContentType.JSON); // <1>
request.source("""
{
"settings" : {
"number_of_shards" : 1,
"number_of_replicas" : 0
},
"mappings" : {
"properties" : {
"message" : { "type" : "text" }
}
},
"aliases" : {
"twitter_alias" : {}
}
}""", XContentType.JSON); // <1>
// end::create-index-whole-source
// tag::create-index-execute
@ -459,13 +461,14 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
{
// tag::put-mapping-request-source
request.source(
"{\n" +
" \"properties\": {\n" +
" \"message\": {\n" +
" \"type\": \"text\"\n" +
" }\n" +
" }\n" +
"}", // <1>
"""
{
"properties": {
"message": {
"type": "text"
}
}
}""", // <1>
XContentType.JSON);
// end::put-mapping-request-source
AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT);
@ -680,16 +683,17 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(createIndexResponse.isAcknowledged());
PutMappingRequest request = new PutMappingRequest("twitter");
request.source(
"{\n"
+ " \"properties\": {\n"
+ " \"message\": {\n"
+ " \"type\": \"text\"\n"
+ " },\n"
+ " \"timestamp\": {\n"
+ " \"type\": \"date\"\n"
+ " }\n"
+ " }\n"
+ "}", // <1>
"""
{
"properties": {
"message": {
"type": "text"
},
"timestamp": {
"type": "date"
}
}
}""", // <1>
XContentType.JSON
);
AcknowledgedResponse putMappingResponse = client.indices().putMapping(request, RequestOptions.DEFAULT);
@ -1112,7 +1116,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
{
Settings settings = Settings.builder().put("number_of_shards", 3).build();
String mappings = "{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}";
String mappings = """
{"properties":{"field-1":{"type":"integer"}}}""";
CreateIndexRequest createIndexRequest = new CreateIndexRequest("index").settings(settings).mapping(mappings, XContentType.JSON);
CreateIndexResponse createIndexResponse = client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
assertTrue(createIndexResponse.isAcknowledged());
@ -1828,7 +1833,9 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
.put("index.number_of_shards", 4)); // <1>
// end::rollover-index-request-settings
// tag::rollover-index-request-mapping
String mappings = "{\"properties\":{\"field-1\":{\"type\":\"keyword\"}}}";
String mappings = """
{"properties":{"field-1":{"type":"keyword"}}}
""";
request.getCreateIndexRequest().mapping(mappings, XContentType.JSON); // <1>
// end::rollover-index-request-mapping
// tag::rollover-index-request-alias
@ -2007,9 +2014,9 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
{
// tag::indices-put-settings-settings-source
request.settings(
"{\"index.number_of_replicas\": \"2\"}"
, XContentType.JSON); // <1>
request.settings("""
{"index.number_of_replicas": "2"}
""", XContentType.JSON); // <1>
// end::indices-put-settings-settings-source
}
@ -2086,13 +2093,14 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
{
// tag::put-template-request-mappings-json
request.mapping(// <1>
"{\n" +
" \"properties\": {\n" +
" \"message\": {\n" +
" \"type\": \"text\"\n" +
" }\n" +
" }\n" +
"}",
"""
{
"properties": {
"message": {
"type": "text"
}
}
}""",
XContentType.JSON);
// end::put-template-request-mappings-json
assertTrue(client.indices().putTemplate(request, LEGACY_TEMPLATE_OPTIONS).isAcknowledged());
@ -2148,27 +2156,28 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
// end::put-template-request-version
// tag::put-template-whole-source
request.source("{\n" +
" \"index_patterns\": [\n" +
" \"log-*\",\n" +
" \"pattern-1\"\n" +
" ],\n" +
" \"order\": 1,\n" +
" \"settings\": {\n" +
" \"number_of_shards\": 1\n" +
" },\n" +
" \"mappings\": {\n" +
" \"properties\": {\n" +
" \"message\": {\n" +
" \"type\": \"text\"\n" +
" }\n" +
" }\n" +
" },\n" +
" \"aliases\": {\n" +
" \"alias-1\": {},\n" +
" \"{index}-alias\": {}\n" +
" }\n" +
"}", XContentType.JSON); // <1>
request.source("""
{
"index_patterns": [
"log-*",
"pattern-1"
],
"order": 1,
"settings": {
"number_of_shards": 1
},
"mappings": {
"properties": {
"message": {
"type": "text"
}
}
},
"aliases": {
"alias-1": {},
"{index}-alias": {}
}
}""", XContentType.JSON); // <1>
// end::put-template-whole-source
// tag::put-template-request-create
@ -2367,13 +2376,14 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
{
// tag::put-index-template-v2-request-mappings-json
String mappingJson = "{\n" +
" \"properties\": {\n" +
" \"message\": {\n" +
" \"type\": \"text\"\n" +
" }\n" +
" }\n" +
"}"; // <1>
String mappingJson = """
{
"properties": {
"message": {
"type": "text"
}
}
}"""; // <1>
PutComposableIndexTemplateRequest request = new PutComposableIndexTemplateRequest()
.name("my-template");
Template template = new Template(null, new CompressedXContent(mappingJson), null); // <2>

View file

@ -62,9 +62,19 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
{
// tag::put-pipeline-request
String source =
"{\"description\":\"my set of processors\"," +
"\"processors\":[{\"set\":{\"field\":\"foo\",\"value\":\"bar\"}}]}";
String source = """
{
"description": "my set of processors",
"processors": [
{
"set": {
"field": "foo",
"value": "bar"
}
}
]
}
""";
PutPipelineRequest request = new PutPipelineRequest(
"my-pipeline-id", // <1>
new BytesArray(source.getBytes(StandardCharsets.UTF_8)), // <2>
@ -97,8 +107,11 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
String source = "{\"description\":\"my set of processors\","
+ "\"processors\":[{\"set\":{\"field\":\"foo\",\"value\":\"bar\"}}]}";
String source = """
{
"description": "my set of processors",
"processors": [ { "set": { "field": "foo", "value": "bar" } } ]
}""";
PutPipelineRequest request = new PutPipelineRequest(
"my-pipeline-id",
new BytesArray(source.getBytes(StandardCharsets.UTF_8)),
@ -278,17 +291,29 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
{
// tag::simulate-pipeline-request
String source =
"{\"" +
"pipeline\":{" +
"\"description\":\"_description\"," +
"\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]" +
"}," +
"\"docs\":[" +
"{\"_index\":\"index\",\"_id\":\"id\",\"_source\":{\"foo\":\"bar\"}}," +
"{\"_index\":\"index\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}" +
"]" +
"}";
String source = """
{
"pipeline": {
"description": "_description",
"processors": [ { "set": { "field": "field2", "value": "_value" } } ]
},
"docs": [
{
"_index": "index",
"_id": "id",
"_source": {
"foo": "bar"
}
},
{
"_index": "index",
"_id": "id",
"_source": {
"foo": "rab"
}
}
]
}""";
SimulatePipelineRequest request = new SimulatePipelineRequest(
new BytesArray(source.getBytes(StandardCharsets.UTF_8)), // <1>
XContentType.JSON // <2>
@ -335,16 +360,29 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
String source = "{\""
+ "pipeline\":{"
+ "\"description\":\"_description\","
+ "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]"
+ "},"
+ "\"docs\":["
+ "{\"_index\":\"index\",\"_id\":\"id\",\"_source\":{\"foo\":\"bar\"}},"
+ "{\"_index\":\"index\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}"
+ "]"
+ "}";
String source = """
{
"pipeline": {
"description": "_description",
"processors": [ { "set": { "field": "field2", "value": "_value" } } ]
},
"docs": [
{
"_index": "index",
"_id": "id",
"_source": {
"foo": "bar"
}
},
{
"_index": "index",
"_id": "id",
"_source": {
"foo": "rab"
}
}
]
}""";
SimulatePipelineRequest request = new SimulatePipelineRequest(
new BytesArray(source.getBytes(StandardCharsets.UTF_8)),
XContentType.JSON

View file

@ -64,16 +64,26 @@ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase {
public void testLicense() throws Exception {
RestHighLevelClient client = highLevelClient();
String license = "{\"license\": {\"uid\":\"893361dc-9749-4997-93cb-802e3d7fa4a8\",\"type\":\"gold\","
+ "\"issue_date_in_millis\":1411948800000,\"expiry_date_in_millis\":1914278399999,\"max_nodes\":1,\"issued_to\":\"issued_to\","
+ "\"issuer\":\"issuer\",\"signature\":\"AAAAAgAAAA3U8+YmnvwC+CWsV/mRAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSm"
+ "kxakxZdW5IMlhlTHNoN1N2MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTn"
+ "FrcTE2dzhJZmZrdFQrN3JQeGwxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1"
+ "JOUml4MXNuL0pSOEhQaVB2azhmUk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVm"
+ "ZIWEtaSU9wTTJENDVvT1NCYklacUYyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQ"
+ "Be8GfzDm6T537Iuuvjetb3xK5dvg0K5NQapv+rczWcQFxgCuzbF8plkgetP1aAGZP4uRESDQPMlOCsx4d0UqqAm9f7GbBQ3l93P+PogInPFeEH9NvOmaAQovmxVM"
+ "9SE6DsDqlX4cXSO+bgWpXPTd2LmpoQc1fXd6BZ8GeuyYpVHVKp9hVU0tAYjw6HzYOE7+zuO1oJYOxElqy66AnIfkvHrvni+flym3tE7tDTgsDRaz7W3iBhaqiSnt"
+ "EqabEkvHdPHQdSR99XGaEvnHO1paK01/35iZF6OXHsF7CCj+558GRXiVxzueOe7TsGSSt8g7YjZwV9bRCyU7oB4B/nidgI\"}}";
String license = """
{
"license": {
"uid": "893361dc-9749-4997-93cb-802e3d7fa4a8",
"type": "gold",
"issue_date_in_millis": 1411948800000,
"expiry_date_in_millis": 1914278399999,
"max_nodes": 1,
"issued_to": "issued_to",
"issuer": "issuer",
"signature": "AAAAAgAAAA3U8+YmnvwC+CWsV/mRAAABmC9ZN0hjZDBGYnVyRXpCOW5Bb3FjZDAxOWpSbTVoMVZwUzRxVk1PSmkxakxZdW5IMlhlTHNoN1N2\
MXMvRFk4d3JTZEx3R3RRZ0pzU3lobWJKZnQvSEFva0ppTHBkWkprZWZSQi9iNmRQNkw1SlpLN0lDalZCS095MXRGN1lIZlpYcVVTTnFrcTE2dzhJZmZrdFQrN3JQeG\
wxb0U0MXZ0dDJHSERiZTVLOHNzSDByWnpoZEphZHBEZjUrTVBxRENNSXNsWWJjZllaODdzVmEzUjNiWktNWGM5TUhQV2plaUo4Q1JOUml4MXNuL0pSOEhQaVB2azhm\
Uk9QVzhFeTFoM1Q0RnJXSG53MWk2K055c28zSmRnVkF1b2JSQkFLV2VXUmVHNDZ2R3o2VE1qbVNQS2lxOHN5bUErZlNIWkZSVmZIWEtaSU9wTTJENDVvT1NCYklacU\
YyK2FwRW9xa0t6dldMbmMzSGtQc3FWOTgzZ3ZUcXMvQkt2RUZwMFJnZzlvL2d2bDRWUzh6UG5pdENGWFRreXNKNkE9PQAAAQBe8GfzDm6T537Iuuvjetb3xK5dvg0K\
5NQapv+rczWcQFxgCuzbF8plkgetP1aAGZP4uRESDQPMlOCsx4d0UqqAm9f7GbBQ3l93P+PogInPFeEH9NvOmaAQovmxVM9SE6DsDqlX4cXSO+bgWpXPTd2LmpoQc1\
fXd6BZ8GeuyYpVHVKp9hVU0tAYjw6HzYOE7+zuO1oJYOxElqy66AnIfkvHrvni+flym3tE7tDTgsDRaz7W3iBhaqiSntEqabEkvHdPHQdSR99XGaEvnHO1paK01/35\
iZF6OXHsF7CCj+558GRXiVxzueOe7TsGSSt8g7YjZwV9bRCyU7oB4B/nidgI"
}
}""";
{
//tag::put-license-execute
PutLicenseRequest request = new PutLicenseRequest();

View file

@ -1207,11 +1207,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
// Let us index a bucket
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\"test-get-buckets\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000,"
+ "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 80.0}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-get-buckets",
"result_type": "bucket",
"timestamp": 1533081600000,
"bucket_span": 600,
"is_interim": false,
"anomaly_score": 80
}""", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
@ -1588,20 +1592,28 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
{
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.source(
"{\"job_id\":\"test-get-overall-buckets-1\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000,"
+ "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 60.0}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-get-overall-buckets-1",
"result_type": "bucket",
"timestamp": 1533081600000,
"bucket_span": 600,
"is_interim": false,
"anomaly_score": 60
}""", XContentType.JSON);
bulkRequest.add(indexRequest);
}
{
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.source(
"{\"job_id\":\"test-get-overall-buckets-2\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000,"
+ "\"bucket_span\": 3600,\"is_interim\": false, \"anomaly_score\": 100.0}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-get-overall-buckets-2",
"result_type": "bucket",
"timestamp": 1533081600000,
"bucket_span": 3600,
"is_interim": false,
"anomaly_score": 100
}""", XContentType.JSON);
bulkRequest.add(indexRequest);
}
@ -1689,11 +1701,15 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
// Let us index a record
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\"test-get-records\", \"result_type\":\"record\", \"timestamp\": 1533081600000,"
+ "\"bucket_span\": 600,\"is_interim\": false, \"record_score\": 80.0}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-get-records",
"result_type": "record",
"timestamp": 1533081600000,
"bucket_span": 600,
"is_interim": false,
"record_score": 80
}""", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
@ -1858,12 +1874,17 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
// Let us index a record
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\"test-get-influencers\", \"result_type\":\"influencer\", \"timestamp\": 1533081600000,"
+ "\"bucket_span\": 600,\"is_interim\": false, \"influencer_score\": 80.0, \"influencer_field_name\": \"my_influencer\","
+ "\"influencer_field_value\":\"foo\"}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-get-influencers",
"result_type": "influencer",
"timestamp": 1533081600000,
"bucket_span": 600,
"is_interim": false,
"influencer_score": 80,
"influencer_field_name": "my_influencer",
"influencer_field_value": "foo"
}""", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
@ -2080,23 +2101,29 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
// Let us index a snapshot
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\""
+ jobId
+ "\", \"timestamp\":1541587919000, "
+ "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", "
+ "\"snapshot_id\":\""
+ snapshotId
+ "\", \"snapshot_doc_count\":1, \"model_size_stats\":{"
+ "\"job_id\":\""
+ jobId
+ "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, "
+ "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2,"
+ "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, "
+ "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000,"
+ "\"latest_result_time_stamp\":1519930800000, \"retain\":false}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "%s",
"timestamp": 1541587919000,
"description": "State persisted due to job close at 2018-11-07T10:51:59+0000",
"snapshot_id": "%s",
"snapshot_doc_count": 1,
"model_size_stats": {
"job_id": "%s",
"result_type": "model_size_stats",
"model_bytes": 51722,
"total_by_field_count": 3,
"total_over_field_count": 0,
"total_partition_field_count": 2,
"bucket_allocation_failures_count": 0,
"memory_status": "ok",
"log_time": 1541587919000,
"timestamp": 1519930800000
},
"latest_record_time_stamp": 1519931700000,
"latest_result_time_stamp": 1519930800000,
"retain": false
}""".formatted(jobId, snapshotId, jobId), XContentType.JSON);
{
client.index(indexRequest, RequestOptions.DEFAULT);
@ -2155,17 +2182,29 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
// Let us index a snapshot
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\"test-get-model-snapshots\", \"timestamp\":1541587919000, "
+ "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", "
+ "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{"
+ "\"job_id\":\"test-get-model-snapshots\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, "
+ "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2,"
+ "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, "
+ "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000,"
+ "\"latest_result_time_stamp\":1519930800000, \"retain\":false}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-get-model-snapshots",
"timestamp": 1541587919000,
"description": "State persisted due to job close at 2018-11-07T10:51:59+0000",
"snapshot_id": "1541587919",
"snapshot_doc_count": 1,
"model_size_stats": {
"job_id": "test-get-model-snapshots",
"result_type": "model_size_stats",
"model_bytes": 51722,
"total_by_field_count": 3,
"total_over_field_count": 0,
"total_partition_field_count": 2,
"bucket_allocation_failures_count": 0,
"memory_status": "ok",
"log_time": 1541587919000,
"timestamp": 1519930800000
},
"latest_record_time_stamp": 1519931700000,
"latest_result_time_stamp": 1519930800000,
"retain": false
}""", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
@ -2256,19 +2295,34 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
String documentId = jobId + "_model_snapshot_" + snapshotId;
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, "
+ "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", "
+ "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{"
+ "\"job_id\":\"test-revert-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, "
+ "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2,"
+ "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, "
+ "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000,"
+ "\"latest_result_time_stamp\":1519930800000, \"retain\":false, "
+ "\"quantiles\":{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, "
+ "\"quantile_state\":\"state\"}}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-revert-model-snapshot",
"timestamp": 1541587919000,
"description": "State persisted due to job close at 2018-11-07T10:51:59+0000",
"snapshot_id": "1541587919",
"snapshot_doc_count": 1,
"model_size_stats": {
"job_id": "test-revert-model-snapshot",
"result_type": "model_size_stats",
"model_bytes": 51722,
"total_by_field_count": 3,
"total_over_field_count": 0,
"total_partition_field_count": 2,
"bucket_allocation_failures_count": 0,
"memory_status": "ok",
"log_time": 1541587919000,
"timestamp": 1519930800000
},
"latest_record_time_stamp": 1519931700000,
"latest_result_time_stamp": 1519930800000,
"retain": false,
"quantiles": {
"job_id": "test-revert-model-snapshot",
"timestamp": 1541587919000,
"quantile_state": "state"
}
}""", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
@ -2334,19 +2388,34 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
String documentId = jobId + "_model_snapshot_" + snapshotId;
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\"test-upgrade-job-model-snapshot\", \"timestamp\":1541587919000, "
+ "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", "
+ "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{"
+ "\"job_id\":\"test-revert-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, "
+ "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2,"
+ "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, "
+ "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000,"
+ "\"latest_result_time_stamp\":1519930800000, \"retain\":false, "
+ "\"quantiles\":{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, "
+ "\"quantile_state\":\"state\"}}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-upgrade-job-model-snapshot",
"timestamp": 1541587919000,
"description": "State persisted due to job close at 2018-11-07T10:51:59+0000",
"snapshot_id": "1541587919",
"snapshot_doc_count": 1,
"model_size_stats": {
"job_id": "test-revert-model-snapshot",
"result_type": "model_size_stats",
"model_bytes": 51722,
"total_by_field_count": 3,
"total_over_field_count": 0,
"total_partition_field_count": 2,
"bucket_allocation_failures_count": 0,
"memory_status": "ok",
"log_time": 1541587919000,
"timestamp": 1519930800000
},
"latest_record_time_stamp": 1519931700000,
"latest_result_time_stamp": 1519930800000,
"retain": false,
"quantiles": {
"job_id": "test-revert-model-snapshot",
"timestamp": 1541587919000,
"quantile_state": "state"
}
}""", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
@ -2415,17 +2484,29 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
// Let us index a snapshot
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source(
"{\"job_id\":\"test-update-model-snapshot\", \"timestamp\":1541587919000, "
+ "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", "
+ "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{"
+ "\"job_id\":\"test-update-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, "
+ "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2,"
+ "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, "
+ "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000,"
+ "\"latest_result_time_stamp\":1519930800000, \"retain\":false}",
XContentType.JSON
);
indexRequest.source("""
{
"job_id": "test-update-model-snapshot",
"timestamp": 1541587919000,
"description": "State persisted due to job close at 2018-11-07T10:51:59+0000",
"snapshot_id": "1541587919",
"snapshot_doc_count": 1,
"model_size_stats": {
"job_id": "test-update-model-snapshot",
"result_type": "model_size_stats",
"model_bytes": 51722,
"total_by_field_count": 3,
"total_over_field_count": 0,
"total_partition_field_count": 2,
"bucket_allocation_failures_count": 0,
"memory_status": "ok",
"log_time": 1541587919000,
"timestamp": 1519930800000
},
"latest_record_time_stamp": 1519931700000,
"latest_result_time_stamp": 1519930800000,
"retain": false
}""", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{

View file

@ -821,10 +821,11 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
request.setScriptType(ScriptType.INLINE);
request.setScript( // <2>
"{" +
" \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
" \"size\" : \"{{size}}\"" +
"}");
"""
{
"query": { "match": { "{{field}}": "{{value}}" } },
"size": "{{size}}"
}""");
Map<String, Object> scriptParams = new HashMap<>();
scriptParams.put("field", "title");
@ -931,10 +932,11 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
request.setScriptType(ScriptType.INLINE);
request.setScript(
"{" +
" \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
" \"size\" : \"{{size}}\"" +
"}");
"""
{
"query": { "match" : { "{{field}}" : "{{value}}" } },
"size" : "{{size}}"
}""");
Map<String, Object> scriptParams = new HashMap<>();
scriptParams.put("field", "title");
@ -1036,16 +1038,16 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
protected void registerQueryScript(RestClient restClient) throws IOException {
// tag::register-script
Request scriptRequest = new Request("POST", "_scripts/title_search");
scriptRequest.setJsonEntity(
"{" +
" \"script\": {" +
" \"lang\": \"mustache\"," +
" \"source\": {" +
" \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
" \"size\" : \"{{size}}\"" +
" }" +
" }" +
"}");
scriptRequest.setJsonEntity("""
{
"script": {
"lang": "mustache",
"source": {
"query": { "match": { "{{field}}": "{{value}}" } },
"size": "{{size}}"
}
}
}""");
Response scriptResponse = restClient.performRequest(scriptRequest);
// end::register-script
assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode());

View file

@ -133,8 +133,9 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
}
{
// tag::create-repository-settings-source
request.settings("{\"location\": \".\", \"compress\": \"true\"}",
XContentType.JSON); // <1>
request.settings("""
{"location": ".", "compress": "true"}
""", XContentType.JSON); // <1>
// end::create-repository-settings-source
}

View file

@ -191,13 +191,14 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
// tag::put-stored-script-request
PutStoredScriptRequest request = new PutStoredScriptRequest();
request.id("id"); // <1>
request.content(new BytesArray(
"{\n" +
"\"script\": {\n" +
"\"lang\": \"painless\",\n" +
"\"source\": \"Math.log(_score * 2) + params.multiplier\"" +
"}\n" +
"}\n"
request.content(new BytesArray("""
{
"script": {
"lang": "painless",
"source": "Math.log(_score * 2) + params.multiplier"
}
}
"""
), XContentType.JSON); // <2>
// end::put-stored-script-request
@ -282,7 +283,8 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
builder.startObject("script");
{
builder.field("lang", "mustache");
builder.field("source", "{\"query\":{\"match\":{\"title\":\"{{query_string}}\"}}}");
builder.field("source", """
{"query":{"match":{"title":"{{query_string}}"}}}""");
}
builder.endObject();
}
@ -294,7 +296,8 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
Map<String, Object> script = getAsMap("/_scripts/id");
assertThat(extractValue("script.lang", script), equalTo("mustache"));
assertThat(extractValue("script.source", script), equalTo("{\"query\":{\"match\":{\"title\":\"{{query_string}}\"}}}"));
assertThat(extractValue("script.source", script), equalTo("""
{"query":{"match":{"title":"{{query_string}}"}}}"""));
}
}

View file

@ -30,12 +30,12 @@ public class TextStructureClientDocumentationIT extends ESRestHighLevelClientTes
RestHighLevelClient client = highLevelClient();
Path anInterestingFile = createTempFile();
String contents = "{\"logger\":\"controller\",\"timestamp\":1478261151445,\"level\":\"INFO\","
+ "\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 1\",\"class\":\"ml\","
+ "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n"
+ "{\"logger\":\"controller\",\"timestamp\":1478261151445,"
+ "\"level\":\"INFO\",\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 2\",\"class\":\"ml\","
+ "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n";
String contents = """
{"logger":"controller","timestamp":1478261151445,"level":"INFO","pid":42,"thread":"0x7fff7d2a8000","message":"message 1",\
"class":"ml","method":"core::SomeNoiseMaker","file":"Noisemaker.cc","line":333}
{"logger":"controller","timestamp":1478261151445,"level":"INFO","pid":42,"thread":"0x7fff7d2a8000","message":"message 2",\
"class":"ml","method":"core::SomeNoiseMaker","file":"Noisemaker.cc","line":333}
""";
Files.write(anInterestingFile, Collections.singleton(contents), StandardCharsets.UTF_8);
{

View file

@ -144,11 +144,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
{
//tag::x-pack-put-watch-execute
// you can also use the WatchSourceBuilder from org.elasticsearch.plugin:x-pack-core to create a watch programmatically
BytesReference watch = new BytesArray("{ \n" +
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
" \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" +
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
"}");
BytesReference watch = new BytesArray("""
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "simple": { "foo" : "bar" } },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""");
PutWatchRequest request = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
request.setActive(false); // <1>
PutWatchResponse response = client.watcher().putWatch(request, RequestOptions.DEFAULT);
@ -162,13 +163,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
}
{
BytesReference watch = new BytesArray(
"{ \n"
+ " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n"
+ " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n"
+ " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n"
+ "}"
);
BytesReference watch = new BytesArray("""
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "simple": { "foo" : "bar" } },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""");
PutWatchRequest request = new PutWatchRequest("my_other_watch_id", watch, XContentType.JSON);
// tag::x-pack-put-watch-execute-listener
ActionListener<PutWatchResponse> listener = new ActionListener<PutWatchResponse>() {
@ -332,11 +332,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
{
// tag::x-pack-execute-watch-inline
String watchJson = "{ \n" +
" \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
" \"input\": { \"none\": {} },\n" +
" \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
"}";
String watchJson = """
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "none": {} },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""";
ExecuteWatchRequest request = ExecuteWatchRequest.inline(watchJson);
request.setAlternativeInput("{ \"foo\" : \"bar\" }"); // <1>
request.setActionMode("action1", ExecuteWatchRequest.ActionExecutionMode.SIMULATE); // <2>
@ -354,11 +355,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
}
{
String watchJson = "{ \n"
+ " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n"
+ " \"input\": { \"none\": {} },\n"
+ " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n"
+ "}";
String watchJson = """
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "none": {} },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""";
ExecuteWatchRequest request = ExecuteWatchRequest.inline(watchJson);
// tag::x-pack-execute-watch-inline-execute-listener
ActionListener<ExecuteWatchResponse> listener = new ActionListener<ExecuteWatchResponse>() {
@ -390,13 +392,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
BytesReference watch = new BytesArray(
"{ \n"
+ " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n"
+ " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n"
+ " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n"
+ "}"
);
BytesReference watch = new BytesArray("""
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "simple": { "foo" : "bar" } },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""");
PutWatchRequest putWatchRequest = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
client.watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT);
@ -458,13 +459,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
BytesReference watch = new BytesArray(
"{ \n"
+ " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n"
+ " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n"
+ " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n"
+ "}"
);
BytesReference watch = new BytesArray("""
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "simple": { "foo" : "bar" } },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""");
PutWatchRequest putWatchRequest = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
client.watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT);
}
@ -510,13 +510,12 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
BytesReference watch = new BytesArray(
"{ \n"
+ " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n"
+ " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n"
+ " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n"
+ "}"
);
BytesReference watch = new BytesArray("""
{
"trigger": { "schedule": { "interval": "10h" } },
"input": { "simple": { "foo" : "bar" } },
"actions": { "logme": { "logging": { "text": "{{ctx.payload}}" } } }
}""");
PutWatchRequest request = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
request.setActive(false); // <1>
PutWatchResponse response = client.watcher().putWatch(request, RequestOptions.DEFAULT);

View file

@ -43,7 +43,8 @@ import static org.hamcrest.Matchers.equalTo;
public class GetIndexTemplatesResponseTests extends ESTestCase {
static final String mappingString = "{\"properties\":{\"f1\": {\"type\":\"text\"},\"f2\": {\"type\":\"keyword\"}}}";
static final String mappingString = """
{"properties":{"f1": {"type":"text"},"f2": {"type":"keyword"}}}""";
public void testFromXContent() throws IOException {
xContentTester(

View file

@ -59,10 +59,8 @@ public class PostDataRequestTests extends AbstractXContentTestCase<PostDataReque
PostDataRequest request = new PostDataRequest(jobId, builder);
assertEquals(
"{\"entry1\":\"value1\",\"entry2\":\"value2\"}{\"entry3\":\"value3\"}{\"entry4\":\"value4\"}",
request.getContent().utf8ToString()
);
assertEquals("""
{"entry1":"value1","entry2":"value2"}{"entry3":"value3"}{"entry4":"value4"}""", request.getContent().utf8ToString());
assertEquals(XContentType.JSON, request.getXContentType());
assertEquals(jobId, request.getJobId());
}

View file

@ -32,26 +32,27 @@ public class PreviewDatafeedResponseTests extends ESTestCase {
}
public void testGetDataList() throws IOException {
String rawData = "[\n"
+ " {\n"
+ " \"time\": 1454803200000,\n"
+ " \"airline\": \"JZA\",\n"
+ " \"doc_count\": 5,\n"
+ " \"responsetime\": 990.4628295898438\n"
+ " },\n"
+ " {\n"
+ " \"time\": 1454803200000,\n"
+ " \"airline\": \"JBU\",\n"
+ " \"doc_count\": 23,\n"
+ " \"responsetime\": 877.5927124023438\n"
+ " },\n"
+ " {\n"
+ " \"time\": 1454803200000,\n"
+ " \"airline\": \"KLM\",\n"
+ " \"doc_count\": 42,\n"
+ " \"responsetime\": 1355.481201171875\n"
+ " }\n"
+ "]";
String rawData = """
[
{
"time": 1454803200000,
"airline": "JZA",
"doc_count": 5,
"responsetime": 990.4628295898438
},
{
"time": 1454803200000,
"airline": "JBU",
"doc_count": 23,
"responsetime": 877.5927124023438
},
{
"time": 1454803200000,
"airline": "KLM",
"doc_count": 42,
"responsetime": 1355.481201171875
}
]""";
BytesReference bytes = new BytesArray(rawData);
PreviewDatafeedResponse response = new PreviewDatafeedResponse(bytes);
assertThat(

View file

@ -140,14 +140,15 @@ public class DatafeedConfigTests extends AbstractXContentTestCase<DatafeedConfig
return false;
}
private static final String FUTURE_DATAFEED = "{\n"
+ " \"datafeed_id\": \"farequote-datafeed\",\n"
+ " \"job_id\": \"farequote\",\n"
+ " \"frequency\": \"1h\",\n"
+ " \"indices\": [\"farequote1\", \"farequote2\"],\n"
+ " \"tomorrows_technology_today\": \"amazing\",\n"
+ " \"scroll_size\": 1234\n"
+ "}";
private static final String FUTURE_DATAFEED = """
{
"datafeed_id": "farequote-datafeed",
"job_id": "farequote",
"frequency": "1h",
"indices": ["farequote1", "farequote2"],
"tomorrows_technology_today": "amazing",
"scroll_size": 1234
}""";
public void testFutureMetadataParse() throws IOException {
XContentParser parser = XContentFactory.xContent(XContentType.JSON)

View file

@ -65,8 +65,8 @@ public class NamedXContentObjectHelperTests extends ESTestCase {
}
public void testSerializeInOrder() throws IOException {
String expected =
"{\"my_objects\":[{\"my_named_object\":{\"my_field\":\"value1\"}},{\"my_named_object\":{\"my_field\":\"value2\"}}]}";
String expected = """
{"my_objects":[{"my_named_object":{"my_field":"value1"}},{"my_named_object":{"my_field":"value2"}}]}""";
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.startObject();
List<NamedXContentObject> objects = Arrays.asList(new NamedTestObject("value1"), new NamedTestObject("value2"));
@ -77,7 +77,8 @@ public class NamedXContentObjectHelperTests extends ESTestCase {
}
public void testSerialize() throws IOException {
String expected = "{\"my_objects\":{\"my_named_object\":{\"my_field\":\"value1\"},\"my_named_object\":{\"my_field\":\"value2\"}}}";
String expected = """
{"my_objects":{"my_named_object":{"my_field":"value1"},"my_named_object":{"my_field":"value2"}}}""";
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.startObject();
List<NamedXContentObject> objects = Arrays.asList(new NamedTestObject("value1"), new NamedTestObject("value2"));

View file

@ -28,20 +28,21 @@ import java.util.List;
public class JobTests extends AbstractXContentTestCase<Job> {
private static final String FUTURE_JOB = "{\n"
+ " \"job_id\": \"farequote\",\n"
+ " \"create_time\": 1234567890000,\n"
+ " \"tomorrows_technology_today\": \"wow\",\n"
+ " \"analysis_config\": {\n"
+ " \"bucket_span\": \"1h\",\n"
+ " \"something_new\": \"gasp\",\n"
+ " \"detectors\": [{\"function\": \"metric\", \"field_name\": \"responsetime\", \"by_field_name\": \"airline\"}]\n"
+ " },\n"
+ " \"data_description\": {\n"
+ " \"time_field\": \"time\",\n"
+ " \"the_future\": 123\n"
+ " }\n"
+ "}";
private static final String FUTURE_JOB = """
{
"job_id": "farequote",
"create_time": 1234567890000,
"tomorrows_technology_today": "wow",
"analysis_config": {
"bucket_span": "1h",
"something_new": "gasp",
"detectors": [{"function": "metric", "field_name": "responsetime", "by_field_name": "airline"}]
},
"data_description": {
"time_field": "time",
"the_future": 123
}
}""";
@Override
protected Job createTestInstance() {

View file

@ -8,8 +8,6 @@
package org.elasticsearch.client.security;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.DeprecationHandler;
import org.elasticsearch.xcontent.NamedXContentRegistry;
@ -26,16 +24,16 @@ import static org.hamcrest.Matchers.hasSize;
public class ClearRealmCacheResponseTests extends ESTestCase {
public void testParseFromXContent() throws IOException {
final ElasticsearchException exception = new ElasticsearchException("test");
final String nodesHeader = "\"_nodes\": { \"total\": 2, \"successful\": 1, \"failed\": 1, \"failures\": [ "
+ Strings.toString(exception)
+ "] },";
final String clusterName = "\"cluster_name\": \"cn\",";
try (
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
"{" + nodesHeader + clusterName + "\"nodes\" : {} }"
"""
{
"_nodes": { "total": 2, "successful": 1, "failed": 1, "failures": [ {"type":"exception","reason":"test"}] },
"cluster_name": "cn",
"nodes" : {}
}"""
)
) {
@ -54,7 +52,12 @@ public class ClearRealmCacheResponseTests extends ESTestCase {
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
"{" + nodesHeader + clusterName + "\"nodes\" : { \"id1\": { \"name\": \"a\"}, \"id2\": { \"name\": \"b\"}}}"
"""
{
"_nodes": { "total": 2, "successful": 1, "failed": 1, "failures": [ {"type":"exception","reason":"test"}] },
"cluster_name": "cn",
"nodes" : { "id1": { "name": "a"}, "id2": { "name": "b"}}
}"""
)
) {

View file

@ -27,10 +27,10 @@ public class ClearRolesCacheResponseTests extends ESTestCase {
public void testParseFromXContent() throws IOException {
final ElasticsearchException exception = new ElasticsearchException("test");
final String nodesHeader = "\"_nodes\": { \"total\": 2, \"successful\": 1, \"failed\": 1, \"failures\": [ "
+ Strings.toString(exception)
+ "] },";
final String clusterName = "\"cluster_name\": \"cn\",";
final String nodesHeader = """
"_nodes": { "total": 2, "successful": 1, "failed": 1, "failures": [ %s] },""".formatted(Strings.toString(exception));
final String clusterName = """
"cluster_name": "cn",""";
try (
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
@ -54,7 +54,9 @@ public class ClearRolesCacheResponseTests extends ESTestCase {
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
"{" + nodesHeader + clusterName + "\"nodes\" : { \"id1\": { \"name\": \"a\"}, \"id2\": { \"name\": \"b\"}}}"
"""
{%s%s"nodes" : { "id1": { "name": "a"}, "id2": { "name": "b"}}}
""".formatted(nodesHeader, clusterName)
)
) {

View file

@ -23,10 +23,8 @@ public class CreateTokenRequestTests extends ESTestCase {
assertThat(new String(request.getPassword()), equalTo("top secret password"));
assertThat(request.getScope(), nullValue());
assertThat(request.getRefreshToken(), nullValue());
assertThat(
Strings.toString(request),
equalTo("{\"grant_type\":\"password\",\"username\":\"jsmith\",\"password\":\"top secret password\"}")
);
assertThat(Strings.toString(request), equalTo("""
{"grant_type":"password","username":"jsmith","password":"top secret password"}"""));
}
public void testCreateTokenFromRefreshToken() {
@ -36,10 +34,8 @@ public class CreateTokenRequestTests extends ESTestCase {
assertThat(request.getScope(), nullValue());
assertThat(request.getUsername(), nullValue());
assertThat(request.getPassword(), nullValue());
assertThat(
Strings.toString(request),
equalTo("{\"grant_type\":\"refresh_token\",\"refresh_token\":\"9a7f41cf-9918-4d1f-bfaa-ad3f8f9f02b9\"}")
);
assertThat(Strings.toString(request), equalTo("""
{"grant_type":"refresh_token","refresh_token":"9a7f41cf-9918-4d1f-bfaa-ad3f8f9f02b9"}"""));
}
public void testCreateTokenFromClientCredentials() {
@ -60,7 +56,8 @@ public class CreateTokenRequestTests extends ESTestCase {
assertThat(request.getPassword(), nullValue());
assertThat(request.getRefreshToken(), nullValue());
assertThat(new String(request.getKerberosTicket()), equalTo("top secret kerberos ticket"));
assertThat(Strings.toString(request), equalTo("{\"grant_type\":\"_kerberos\",\"kerberos_ticket\":\"top secret kerberos ticket\"}"));
assertThat(Strings.toString(request), equalTo("""
{"grant_type":"_kerberos","kerberos_ticket":"top secret kerberos ticket"}"""));
}
public void testEqualsAndHashCode() {

View file

@ -26,18 +26,19 @@ import static org.hamcrest.Matchers.equalTo;
public class ExpressionRoleMappingTests extends ESTestCase {
public void testExpressionRoleMappingParser() throws IOException {
final String json = "{\n"
+ " \"enabled\" : true,\n"
+ " \"roles\" : [\n"
+ " \"superuser\"\n"
+ " ],\n"
+ " \"rules\" : {\n"
+ " \"field\" : {\n"
+ " \"realm.name\" : \"kerb1\"\n"
+ " }\n"
+ " },\n"
+ " \"metadata\" : { }\n"
+ " }";
final String json = """
{
"enabled" : true,
"roles" : [
"superuser"
],
"rules" : {
"field" : {
"realm.name" : "kerb1"
}
},
"metadata" : { }
}""";
final ExpressionRoleMapping expressionRoleMapping = ExpressionRoleMapping.PARSER.parse(
XContentType.JSON.xContent()
.createParser(new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json),

View file

@ -30,44 +30,49 @@ import static org.hamcrest.Matchers.equalTo;
public class GetPrivilegesResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
final String json = "{"
+ " \"testapp\": {"
+ " \"read\": {"
+ " \"application\": \"testapp\","
+ " \"name\": \"read\","
+ " \"actions\": [ \"action:login\", \"data:read/*\" ]"
+ " },"
+ " \"write\": {"
+ " \"application\": \"testapp\","
+ " \"name\": \"write\","
+ " \"actions\": [ \"action:login\", \"data:write/*\" ],"
+ " \"metadata\": { \"key1\": \"value1\" }"
+ " },"
+ " \"all\": {"
+ " \"application\": \"testapp\","
+ " \"name\": \"all\","
+ " \"actions\": [ \"action:login\", \"data:write/*\" , \"manage:*\"]"
+ " }"
+ " },"
+ " \"testapp2\": {"
+ " \"read\": {"
+ " \"application\": \"testapp2\","
+ " \"name\": \"read\","
+ " \"actions\": [ \"action:login\", \"data:read/*\" ],"
+ " \"metadata\": { \"key2\": \"value2\" }"
+ " },"
+ " \"write\": {"
+ " \"application\": \"testapp2\","
+ " \"name\": \"write\","
+ " \"actions\": [ \"action:login\", \"data:write/*\" ]"
+ " },"
+ " \"all\": {"
+ " \"application\": \"testapp2\","
+ " \"name\": \"all\","
+ " \"actions\": [ \"action:login\", \"data:write/*\" , \"manage:*\"]"
+ " }"
+ " }"
+ "}";
final String json = """
{
"testapp": {
"read": {
"application": "testapp",
"name": "read",
"actions": [ "action:login", "data:read/*" ]
},
"write": {
"application": "testapp",
"name": "write",
"actions": [ "action:login", "data:write/*" ],
"metadata": {
"key1": "value1"
}
},
"all": {
"application": "testapp",
"name": "all",
"actions": [ "action:login", "data:write/*", "manage:*" ]
}
},
"testapp2": {
"read": {
"application": "testapp2",
"name": "read",
"actions": [ "action:login", "data:read/*" ],
"metadata": {
"key2": "value2"
}
},
"write": {
"application": "testapp2",
"name": "write",
"actions": [ "action:login", "data:write/*" ]
},
"all": {
"application": "testapp2",
"name": "all",
"actions": [ "action:login", "data:write/*", "manage:*" ]
}
}
}""";
final GetPrivilegesResponse response = GetPrivilegesResponse.fromXContent(
XContentType.JSON.xContent()

View file

@ -25,32 +25,33 @@ import static org.hamcrest.Matchers.equalTo;
public class GetRoleMappingsResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
final String json = "{\n"
+ " \"kerberosmapping\" : {\n"
+ " \"enabled\" : true,\n"
+ " \"roles\" : [\n"
+ " \"superuser\"\n"
+ " ],\n"
+ " \"rules\" : {\n"
+ " \"field\" : {\n"
+ " \"realm.name\" : \"kerb1\"\n"
+ " }\n"
+ " },\n"
+ " \"metadata\" : { }\n"
+ " },\n"
+ " \"ldapmapping\" : {\n"
+ " \"enabled\" : false,\n"
+ " \"roles\" : [\n"
+ " \"monitoring\"\n"
+ " ],\n"
+ " \"rules\" : {\n"
+ " \"field\" : {\n"
+ " \"groups\" : \"cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local\"\n"
+ " }\n"
+ " },\n"
+ " \"metadata\" : { }\n"
+ " }\n"
+ "}";
final String json = """
{
"kerberosmapping" : {
"enabled" : true,
"roles" : [
"superuser"
],
"rules" : {
"field" : {
"realm.name" : "kerb1"
}
},
"metadata" : { }
},
"ldapmapping" : {
"enabled" : false,
"roles" : [
"monitoring"
],
"rules" : {
"field" : {
"groups" : "cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"
}
},
"metadata" : { }
}
}""";
final GetRoleMappingsResponse response = GetRoleMappingsResponse.fromXContent(
XContentType.JSON.xContent()
.createParser(new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json)

View file

@ -29,28 +29,29 @@ import static org.hamcrest.Matchers.equalTo;
public class GetRolesResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
String json = "{\n"
+ " \"my_admin_role\": {\n"
+ " \"cluster\" : [ \"all\" ],\n"
+ " \"indices\" : [\n"
+ " {\n"
+ " \"names\" : [ \"index1\", \"index2\" ],\n"
+ " \"privileges\" : [ \"all\" ],\n"
+ " \"allow_restricted_indices\" : true,\n"
+ " \"field_security\" : {\n"
+ " \"grant\" : [ \"title\", \"body\" ]}\n"
+ " }\n"
+ " ],\n"
+ " \"applications\" : [ ],\n"
+ " \"run_as\" : [ \"other_user\" ],\n"
+ " \"metadata\" : {\n"
+ " \"version\" : 1\n"
+ " },\n"
+ " \"transient_metadata\" : {\n"
+ " \"enabled\" : true\n"
+ " }\n"
+ " }\n"
+ "}";
String json = """
{
"my_admin_role": {
"cluster" : [ "all" ],
"indices" : [
{
"names" : [ "index1", "index2" ],
"privileges" : [ "all" ],
"allow_restricted_indices" : true,
"field_security" : {
"grant" : [ "title", "body" ]}
}
],
"applications" : [ ],
"run_as" : [ "other_user" ],
"metadata" : {
"version" : 1
},
"transient_metadata" : {
"enabled" : true
}
}
}""";
final GetRolesResponse response = GetRolesResponse.fromXContent(
(XContentType.JSON.xContent()
.createParser(new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json))

View file

@ -30,32 +30,103 @@ import static org.hamcrest.Matchers.nullValue;
public class GetUserPrivilegesResponseTests extends ESTestCase {
public void testParse() throws Exception {
String json = "{"
+ "\"cluster\":[\"manage\",\"manage_security\",\"monitor\"],"
+ "\"global\":["
+ " {\"application\":{\"manage\":{\"applications\":[\"test-*\"]}}},"
+ " {\"application\":{\"manage\":{\"applications\":[\"apps-*\"]}}}"
+ "],"
+ "\"indices\":["
+ " {\"names\":[\"test-1-*\"],\"privileges\":[\"read\"],\"allow_restricted_indices\": false},"
+ " {\"names\":[\"test-4-*\"],\"privileges\":[\"read\"],\"allow_restricted_indices\": true,"
+ " \"field_security\":[{\"grant\":[\"*\"],\"except\":[\"private-*\"]}]},"
+ " {\"names\":[\"test-6-*\",\"test-7-*\"],\"privileges\":[\"read\"],\"allow_restricted_indices\": true,"
+ " \"query\":[\"{\\\"term\\\":{\\\"test\\\":true}}\"]},"
+ " {\"names\":[\"test-2-*\"],\"privileges\":[\"read\"],\"allow_restricted_indices\": false,"
+ " \"field_security\":[{\"grant\":[\"*\"],\"except\":[\"secret-*\",\"private-*\"]},{\"grant\":[\"apps-*\"]}],"
+ " \"query\":[\"{\\\"term\\\":{\\\"test\\\":true}}\",\"{\\\"term\\\":{\\\"apps\\\":true}}\"]},"
+ " {\"names\":[\"test-3-*\",\"test-6-*\"],\"privileges\":[\"read\",\"write\"],\"allow_restricted_indices\": true},"
+ " {\"names\":[\"test-3-*\",\"test-4-*\",\"test-5-*\"],\"privileges\":[\"read\"],\"allow_restricted_indices\": false,"
+ " \"field_security\":[{\"grant\":[\"test-*\"]}]},"
+ " {\"names\":[\"test-1-*\",\"test-9-*\"],\"privileges\":[\"all\"],\"allow_restricted_indices\": true}"
+ "],"
+ "\"applications\":["
+ " {\"application\":\"app-dne\",\"privileges\":[\"all\"],\"resources\":[\"*\"]},"
+ " {\"application\":\"test-app\",\"privileges\":[\"read\"],\"resources\":[\"object/1\",\"object/2\"]},"
+ " {\"application\":\"test-app\",\"privileges\":[\"user\",\"dne\"],\"resources\":[\"*\"]}"
+ "],"
+ "\"run_as\":[\"app-*\",\"test-*\"]}";
String json = """
{
"cluster": [ "manage", "manage_security", "monitor" ],
"global": [
{
"application": {
"manage": {
"applications": [ "test-*" ]
}
}
},
{
"application": {
"manage": {
"applications": [ "apps-*" ]
}
}
}
],
"indices": [
{
"names": [ "test-1-*" ],
"privileges": [ "read" ],
"allow_restricted_indices": false
},
{
"names": [ "test-4-*" ],
"privileges": [ "read" ],
"allow_restricted_indices": true,
"field_security": [
{
"grant": [ "*" ],
"except": [ "private-*" ]
}
]
},
{
"names": [ "test-6-*", "test-7-*" ],
"privileges": [ "read" ],
"allow_restricted_indices": true,
"query": [ "{\\"term\\":{\\"test\\":true}}" ]
},
{
"names": [ "test-2-*" ],
"privileges": [ "read" ],
"allow_restricted_indices": false,
"field_security": [
{
"grant": [ "*" ],
"except": [ "secret-*", "private-*" ]
},
{
"grant": [ "apps-*" ]
}
],
"query": [ "{\\"term\\":{\\"test\\":true}}", "{\\"term\\":{\\"apps\\":true}}" ]
},
{
"names": [ "test-3-*", "test-6-*" ],
"privileges": [ "read", "write" ],
"allow_restricted_indices": true
},
{
"names": [ "test-3-*", "test-4-*", "test-5-*" ],
"privileges": [ "read" ],
"allow_restricted_indices": false,
"field_security": [
{
"grant": [ "test-*" ]
}
]
},
{
"names": [ "test-1-*", "test-9-*" ],
"privileges": [ "all" ],
"allow_restricted_indices": true
}
],
"applications": [
{
"application": "app-dne",
"privileges": [ "all" ],
"resources": [ "*" ]
},
{
"application": "test-app",
"privileges": [ "read" ],
"resources": [ "object/1", "object/2" ]
},
{
"application": "test-app",
"privileges": [ "user", "dne" ],
"resources": [ "*" ]
}
],
"run_as": [ "app-*", "test-*" ]
}""";
final XContentParser parser = createParser(XContentType.JSON.xContent(), json);
final GetUserPrivilegesResponse response = GetUserPrivilegesResponse.fromXContent(parser);

View file

@ -13,6 +13,7 @@ import org.elasticsearch.client.security.user.privileges.Role;
import org.elasticsearch.client.security.user.privileges.Role.ClusterPrivilegeName;
import org.elasticsearch.client.security.user.privileges.Role.IndexPrivilegeName;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
@ -42,19 +43,18 @@ public class GrantApiKeyRequestTests extends ESTestCase {
final String apiKeyMetadataString = apiKeyMetadata == null
? ""
: ",\"metadata\":" + XContentTestUtils.convertToXContent(apiKeyMetadata, XContentType.JSON).utf8ToString();
assertThat(
output,
equalTo(
"{"
+ "\"grant_type\":\"password\","
+ "\"username\":\"kamala.khan\","
+ "\"password\":\"JerseyGirl!\","
+ "\"api_key\":{\"name\":\"api-key\",\"role_descriptors\":{}"
+ apiKeyMetadataString
+ "}"
+ "}"
)
);
assertThat(output, equalTo(XContentHelper.stripWhitespace("""
{
"grant_type": "password",
"username": "kamala.khan",
"password": "JerseyGirl!",
"api_key": {
"name": "api-key",
"role_descriptors": {}
%s
}
}
""".formatted(apiKeyMetadataString))));
}
public void testEqualsHashCode() {

View file

@ -47,31 +47,34 @@ public class HasPrivilegesRequestTests extends ESTestCase {
String json = Strings.toString(request);
final Map<String, Object> parsed = XContentHelper.convertToMap(XContentType.JSON.xContent(), json, false);
final Map<String, Object> expected = XContentHelper.convertToMap(
XContentType.JSON.xContent(),
"{"
+ " \"cluster\":[\"monitor\",\"manage_watcher\",\"manage_ml\"],"
+ " \"index\":[{"
+ " \"names\":[\"index-001\",\"index-002\"],"
+ " \"privileges\":[\"all\"],"
+ " \"allow_restricted_indices\":true"
+ " },{"
+ " \"names\":[\"index-003\"],"
+ " \"privileges\":[\"read\"],"
+ " \"allow_restricted_indices\":false"
+ " }],"
+ " \"application\":[{"
+ " \"application\":\"myapp\","
+ " \"privileges\":[\"read\",\"write\"],"
+ " \"resources\":[\"*\"]"
+ " },{"
+ " \"application\":\"myapp\","
+ " \"privileges\":[\"admin\"],"
+ " \"resources\":[\"/data/*\"]"
+ " }]"
+ "}",
false
);
final Map<String, Object> expected = XContentHelper.convertToMap(XContentType.JSON.xContent(), """
{
"cluster": [ "monitor", "manage_watcher", "manage_ml" ],
"index": [
{
"names": [ "index-001", "index-002" ],
"privileges": [ "all" ],
"allow_restricted_indices": true
},
{
"names": [ "index-003" ],
"privileges": [ "read" ],
"allow_restricted_indices": false
}
],
"application": [
{
"application": "myapp",
"privileges": [ "read", "write" ],
"resources": [ "*" ]
},
{
"application": "myapp",
"privileges": [ "admin" ],
"resources": [ "/data/*" ]
}
]
}""", false);
assertThat(XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder(parsed, expected), Matchers.nullValue());
}

View file

@ -25,50 +25,51 @@ import static java.util.Collections.emptyMap;
public class HasPrivilegesResponseTests extends ESTestCase {
public void testParseValidResponse() throws IOException {
String json = "{"
+ " \"username\": \"namor\","
+ " \"has_all_requested\": false,"
+ " \"cluster\" : {"
+ " \"manage\" : false,"
+ " \"monitor\" : true"
+ " },"
+ " \"index\" : {"
+ " \"index-01\": {"
+ " \"read\" : true,"
+ " \"write\" : false"
+ " },"
+ " \"index-02\": {"
+ " \"read\" : true,"
+ " \"write\" : true"
+ " },"
+ " \"index-03\": {"
+ " \"read\" : false,"
+ " \"write\" : false"
+ " }"
+ " },"
+ " \"application\" : {"
+ " \"app01\" : {"
+ " \"/object/1\" : {"
+ " \"read\" : true,"
+ " \"write\" : false"
+ " },"
+ " \"/object/2\" : {"
+ " \"read\" : true,"
+ " \"write\" : true"
+ " }"
+ " },"
+ " \"app02\" : {"
+ " \"/object/1\" : {"
+ " \"read\" : false,"
+ " \"write\" : false"
+ " },"
+ " \"/object/3\" : {"
+ " \"read\" : false,"
+ " \"write\" : true"
+ " }"
+ " }"
+ " }"
+ "}";
String json = """
{
"username": "namor",
"has_all_requested": false,
"cluster": {
"manage": false,
"monitor": true
},
"index": {
"index-01": {
"read": true,
"write": false
},
"index-02": {
"read": true,
"write": true
},
"index-03": {
"read": false,
"write": false
}
},
"application": {
"app01": {
"/object/1": {
"read": true,
"write": false
},
"/object/2": {
"read": true,
"write": true
}
},
"app02": {
"/object/1": {
"read": false,
"write": false
},
"/object/3": {
"read": false,
"write": true
}
}
}
}""";
final XContentParser parser = createParser(XContentType.JSON.xContent(), json);
HasPrivilegesResponse response = HasPrivilegesResponse.fromXContent(parser);

View file

@ -60,7 +60,8 @@ public class InvalidateTokenRequestTests extends ESTestCase {
assertThat(request.getRefreshToken(), nullValue());
assertThat(request.getRealmName(), equalTo(realmName));
assertThat(request.getUsername(), equalTo(username));
assertThat(Strings.toString(request), equalTo("{\"realm_name\":\"native\",\"username\":\"user\"}"));
assertThat(Strings.toString(request), equalTo("""
{"realm_name":"native","username":"user"}"""));
}
public void testEqualsAndHashCode() {

View file

@ -62,41 +62,42 @@ public class PutPrivilegesRequestTests extends ESTestCase {
}
public void testToXContent() throws IOException {
final String expected = "{\n"
+ " \"app01\" : {\n"
+ " \"all\" : {\n"
+ " \"application\" : \"app01\",\n"
+ " \"name\" : \"all\",\n"
+ " \"actions\" : [\n"
+ " \"action:login\",\n"
+ " \"action:logout\"\n"
+ " ],\n"
+ " \"metadata\" : {\n"
+ " \"k1\" : \"v1\"\n"
+ " }\n"
+ " },\n"
+ " \"read\" : {\n"
+ " \"application\" : \"app01\",\n"
+ " \"name\" : \"read\",\n"
+ " \"actions\" : [\n"
+ " \"data:read\"\n"
+ " ]\n"
+ " }\n"
+ " },\n"
+ " \"app02\" : {\n"
+ " \"all\" : {\n"
+ " \"application\" : \"app02\",\n"
+ " \"name\" : \"all\",\n"
+ " \"actions\" : [\n"
+ " \"action:login\",\n"
+ " \"action:logout\"\n"
+ " ],\n"
+ " \"metadata\" : {\n"
+ " \"k2\" : \"v2\"\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}";
final String expected = """
{
"app01" : {
"all" : {
"application" : "app01",
"name" : "all",
"actions" : [
"action:login",
"action:logout"
],
"metadata" : {
"k1" : "v1"
}
},
"read" : {
"application" : "app01",
"name" : "read",
"actions" : [
"data:read"
]
}
},
"app02" : {
"all" : {
"application" : "app02",
"name" : "all",
"actions" : [
"action:login",
"action:logout"
],
"metadata" : {
"k2" : "v2"
}
}
}
}""";
List<ApplicationPrivilege> privileges = new ArrayList<>();
privileges.add(
ApplicationPrivilege.builder()

View file

@ -21,21 +21,22 @@ import static org.hamcrest.Matchers.is;
public class PutPrivilegesResponseTests extends ESTestCase {
public void testFromXContent() throws IOException {
final String json = "{\n"
+ " \"app02\": {\n"
+ " \"all\": {\n"
+ " \"created\": true\n"
+ " }\n"
+ " },\n"
+ " \"app01\": {\n"
+ " \"read\": {\n"
+ " \"created\": false\n"
+ " },\n"
+ " \"write\": {\n"
+ " \"created\": true\n"
+ " }\n"
+ " }\n"
+ "}";
final String json = """
{
"app02": {
"all": {
"created": true
}
},
"app01": {
"read": {
"created": false
},
"write": {
"created": true
}
}
}""";
final PutPrivilegesResponse putPrivilegesResponse = PutPrivilegesResponse.fromXContent(
createParser(XContentType.JSON.xContent(), json)

View file

@ -144,29 +144,22 @@ public class PutRoleMappingRequestTests extends ESTestCase {
final XContentBuilder builder = XContentFactory.jsonBuilder();
putRoleMappingRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
final String output = Strings.toString(builder);
final String expected = String.format(
Locale.ROOT,
"{"
+ " \"enabled\": %s,"
+ " \"roles\": ["
+ " \"superuser\""
+ " ],"
+ "\"role_templates\":[],"
+ "\"rules\":{"
+ " \"field\": {"
+ " \"username\": ["
+ " \"user\""
+ " ]"
+ " }"
+ "},"
+ " \"metadata\": {"
+ " \"k1\": \"v1\""
+ " }"
+ "}",
enabled
).replaceAll("\\s+", "");
final String expected = String.format(Locale.ROOT, """
{
"enabled": %s,
"roles": [ "superuser" ],
"role_templates": [],
"rules": {
"field": {
"username": [ "user" ]
}
},
"metadata": {
"k1": "v1"
}
}""", enabled);
assertThat(output, equalTo(expected));
assertThat(output.replace("\\s", ""), equalTo(expected.replaceAll("\\s+", "")));
}
public void testPutRoleMappingRequestWithTemplateToXContent() throws IOException {
@ -194,34 +187,29 @@ public class PutRoleMappingRequestTests extends ESTestCase {
final XContentBuilder builder = XContentFactory.jsonBuilder();
putRoleMappingRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
final String output = Strings.toString(builder);
final String expected = String.format(
Locale.ROOT,
"{"
+ " \"enabled\": %s,"
+ "\"roles\":[],"
+ "\"role_templates\":["
+ " {"
+ " \"template\": \"{\\\"source\\\":\\\"_realm_{{realm.name}}\\\"}\","
+ " \"format\": \"string\""
+ " },"
+ " {"
+ " \"template\": \"{\\\"source\\\":\\\"some_role\\\"}\","
+ " \"format\": \"string\""
+ " }"
+ "],"
+ "\"rules\":{"
+ " \"field\": {"
+ " \"username\": ["
+ " \"user\""
+ " ]"
+ " }"
+ "},"
+ " \"metadata\": {"
+ " \"k1\": \"v1\""
+ " }"
+ "}",
enabled
).replaceAll("\\s+", "");
final String expected = String.format(Locale.ROOT, """
{
"enabled": %s,
"roles": [],
"role_templates": [
{
"template": "{\\"source\\":\\"_realm_{{realm.name}}\\"}",
"format": "string"
},
{
"template": "{\\"source\\":\\"some_role\\"}",
"format": "string"
}
],
"rules": {
"field": {
"username": [ "user" ]
}
},
"metadata": {
"k1": "v1"
}
}""", enabled).replaceAll("\\s+", "");
assertThat(output, equalTo(expected));
}

View file

@ -13,6 +13,7 @@ import org.elasticsearch.client.security.HasPrivilegesResponse;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
@ -74,21 +75,29 @@ public class HasPrivilegesResponseTests extends AbstractResponseTestCase<
BytesReference bytes = BytesReference.bytes(builder);
final String json = bytes.utf8ToString();
Assert.assertThat(
json,
equalTo(
"{"
+ "\"username\":\"daredevil\","
+ "\"has_all_requested\":false,"
+ "\"cluster\":{\"manage\":true},"
+ "\"index\":{"
+ "\"customers\":{\"read\":true,\"index\":true,\"delete\":true,\"manage\":false},"
+ "\"staff\":{\"read\":true,\"index\":true,\"delete\":false,\"manage\":false}"
+ "},"
+ "\"application\":{}"
+ "}"
)
);
Assert.assertThat(json, equalTo(XContentHelper.stripWhitespace("""
{
"username": "daredevil",
"has_all_requested": false,
"cluster": {
"manage": true
},
"index": {
"customers": {
"read": true,
"index": true,
"delete": true,
"manage": false
},
"staff": {
"read": true,
"index": true,
"delete": false,
"manage": false
}
},
"application": {}
}""")));
}
@Override

View file

@ -13,6 +13,7 @@ import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRo
import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
@ -43,41 +44,39 @@ public class RoleMapperExpressionDslTests extends ESTestCase {
final XContentBuilder builder = XContentFactory.jsonBuilder();
allExpression.toXContent(builder, ToXContent.EMPTY_PARAMS);
final String output = Strings.toString(builder);
final String expected = "{"
+ "\"all\":["
+ "{"
+ "\"any\":["
+ "{"
+ "\"field\":{"
+ "\"dn\":[\"*,ou=admin,dc=example,dc=com\"]"
+ "}"
+ "},"
+ "{"
+ "\"field\":{"
+ "\"username\":["
+ "\"es-admin\","
+ "\"es-system\""
+ "]"
+ "}"
+ "}"
+ "]"
+ "},"
+ "{"
+ "\"field\":{"
+ "\"groups\":[\"cn=people,dc=example,dc=com\"]"
+ "}"
+ "},"
+ "{"
+ "\"except\":{"
+ "\"field\":{"
+ "\"metadata.terminated_date\":[\"2018-09-17T00:50:01.027Z\"]"
+ "}"
+ "}"
+ "}"
+ "]"
+ "}";
final String expected = """
{
"all": [
{
"any": [
{
"field": {
"dn": [ "*,ou=admin,dc=example,dc=com" ]
}
},
{
"field": {
"username": [ "es-admin", "es-system" ]
}
}
]
},
{
"field": {
"groups": [ "cn=people,dc=example,dc=com" ]
}
},
{
"except": {
"field": {
"metadata.terminated_date": [ "2018-09-17T00:50:01.027Z" ]
}
}
}
]
}""";
assertThat(output, equalTo(expected));
assertThat(output, equalTo(XContentHelper.stripWhitespace(expected)));
}
public void testFieldRoleMapperExpressionThrowsExceptionForMissingMetadataPrefix() {

View file

@ -30,17 +30,18 @@ import static org.hamcrest.Matchers.equalTo;
public class ApplicationPrivilegeTests extends ESTestCase {
public void testFromXContentAndToXContent() throws IOException {
String json = "{\n"
+ " \"application\" : \"myapp\",\n"
+ " \"name\" : \"read\",\n"
+ " \"actions\" : [\n"
+ " \"data:read/*\",\n"
+ " \"action:login\"\n"
+ " ],\n"
+ " \"metadata\" : {\n"
+ " \"description\" : \"Read access to myapp\"\n"
+ " }\n"
+ "}";
String json = """
{
"application" : "myapp",
"name" : "read",
"actions" : [
"data:read/*",
"action:login"
],
"metadata" : {
"description" : "Read access to myapp"
}
}""";
final ApplicationPrivilege privilege = ApplicationPrivilege.fromXContent(
XContentType.JSON.xContent()
.createParser(new NamedXContentRegistry(Collections.emptyList()), DeprecationHandler.IGNORE_DEPRECATIONS, json)

View file

@ -80,26 +80,22 @@ public class GroupConfigTests extends AbstractXContentTestCase<GroupConfig> {
}
public void testLenientParsing() throws IOException {
BytesArray json = new BytesArray(
"{"
+ " \"unknown-field\": \"foo\","
+ " \"destination-field\": {"
+ " \"terms\": {"
+ " \"field\": \"term-field\""
+ " }"
+ " },"
+ " \"unknown-field-2\": \"bar\","
+ " \"destination-field2\": {"
+ " \"terms\": {"
+ " \"field\": \"term-field2\""
+ " }"
+ " },"
+ " \"array-field\": ["
+ " 1,"
+ " 2"
+ " ]"
+ "}"
);
BytesArray json = new BytesArray("""
{
"unknown-field": "foo",
"destination-field": {
"terms": {
"field": "term-field"
}
},
"unknown-field-2": "bar",
"destination-field2": {
"terms": {
"field": "term-field2"
}
},
"array-field": [ 1, 2 ]
}""");
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
@ -116,22 +112,21 @@ public class GroupConfigTests extends AbstractXContentTestCase<GroupConfig> {
}
public void testLenientParsingUnknowGroupType() throws IOException {
BytesArray json = new BytesArray(
"{"
+ " \"destination-field1\": {"
+ " \"newgroup\": {"
+ " \"field1\": \"bar\","
+ " \"field2\": \"foo\""
+ " }"
+ " },"
+ " \"unknown-field\": \"bar\","
+ " \"destination-field2\": {"
+ " \"terms\": {"
+ " \"field\": \"term-field\""
+ " }"
+ " }"
+ "}"
);
BytesArray json = new BytesArray("""
{
"destination-field1": {
"newgroup": {
"field1": "bar",
"field2": "foo"
}
},
"unknown-field": "bar",
"destination-field2": {
"terms": {
"field": "term-field"
}
}
}""");
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,

View file

@ -36,7 +36,9 @@ public class CreatedLocationHeaderIT extends ESRestTestCase {
public void testUpsert() throws IOException {
Request request = new Request("POST", "test/_update/1");
request.setJsonEntity("{" + "\"doc\": {\"test\": \"test\"}," + "\"doc_as_upsert\": true}");
request.setJsonEntity("""
{"doc": {"test": "test"},"doc_as_upsert": true}
""");
locationTestCase(client().performRequest(request));
}

View file

@ -121,13 +121,9 @@ public class NodeRestUsageIT extends ESRestTestCase {
() -> client().performRequest(new Request("GET", "_nodes/usage/_all,rest_actions"))
);
assertNotNull(exception);
assertThat(
exception.getMessage(),
containsString(
"\"type\":\"illegal_argument_exception\","
+ "\"reason\":\"request [_nodes/usage/_all,rest_actions] contains _all and individual metrics [_all,rest_actions]\""
)
);
assertThat(exception.getMessage(), containsString("""
"type":"illegal_argument_exception",\
"reason":"request [_nodes/usage/_all,rest_actions] contains _all and individual metrics [_all,rest_actions]\""""));
}
@SuppressWarnings("unchecked")
@ -146,10 +142,25 @@ public class NodeRestUsageIT extends ESRestTestCase {
Map<String, Map<String, Long>> beforeCombinedAggsUsage = getTotalUsage(beforeNodesMap);
// Do some requests to get some rest usage stats
Request create = new Request("PUT", "/test");
create.setJsonEntity(
"{\"mappings\": {\"properties\": { \"str\": {\"type\": \"keyword\"}, "
+ "\"foo\": {\"type\": \"keyword\"}, \"num\": {\"type\": \"long\"}, \"start\": {\"type\": \"date\"} } }}"
);
create.setJsonEntity("""
{
"mappings": {
"properties": {
"str": {
"type": "keyword"
},
"foo": {
"type": "keyword"
},
"num": {
"type": "long"
},
"start": {
"type": "date"
}
}
}
}""");
client().performRequest(create);
Request searchRequest = new Request("GET", "/test/_search");

View file

@ -81,17 +81,15 @@ public class NodeRoleParserTests extends LaunchersTestCase {
}
public void testYamlSyntax() throws IOException {
MachineDependentHeap.MachineNodeRole nodeRole = parseConfig(sb -> {
sb.append("node:\n");
sb.append(" roles:\n");
sb.append(" - master");
});
MachineDependentHeap.MachineNodeRole nodeRole = parseConfig(sb -> sb.append("""
node:
roles:
- master"""));
assertThat(nodeRole, equalTo(MASTER_ONLY));
nodeRole = parseConfig(sb -> {
sb.append("node:\n");
sb.append(" roles: [ml]");
});
nodeRole = parseConfig(sb -> sb.append("""
node:
roles: [ml]"""));
assertThat(nodeRole, equalTo(ML_ONLY));
}

View file

@ -258,11 +258,17 @@ public class ListPluginsCommandTests extends ESTestCase {
MockTerminal terminal = listPlugins(home);
String message = "plugin [fake_plugin1] was built for Elasticsearch version 1.0.0 but version " + Version.CURRENT + " is required";
assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput());
assertEquals("""
fake_plugin1
fake_plugin2
""", terminal.getOutput());
assertEquals("WARNING: " + message + "\n", terminal.getErrorOutput());
String[] params = { "-s" };
terminal = listPlugins(home, params);
assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput());
assertEquals("""
fake_plugin1
fake_plugin2
""", terminal.getOutput());
}
}

View file

@ -315,26 +315,10 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
Object previousSecond = null;
while (firstTokens.hasNext()) {
if (false == secondTokens.hasNext()) {
fail(
second
+ " has fewer tokens than "
+ first
+ ". "
+ first
+ " has ["
+ firstTokens.next()
+ "] but "
+ second
+ " is out of tokens. "
+ first
+ "'s last token was ["
+ previousFirst
+ "] and "
+ second
+ "'s last token was' ["
+ previousSecond
+ "]"
);
fail("""
%s has fewer tokens than %s. %s has [%s] but %s is out of tokens. \
%s's last token was [%s] and %s's last token was' [%s]
""".formatted(second, first, first, firstTokens.next(), second, first, previousFirst, second, previousSecond));
}
Map<?, ?> firstToken = (Map<?, ?>) firstTokens.next();
Map<?, ?> secondToken = (Map<?, ?>) secondTokens.next();
@ -342,20 +326,11 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
String secondText = (String) secondToken.get("token");
// Check the text and produce an error message with the utf8 sequence if they don't match.
if (false == secondText.equals(firstText)) {
fail(
"text differs: "
+ first
+ " was ["
+ firstText
+ "] but "
+ second
+ " was ["
+ secondText
+ "]. In utf8 those are\n"
+ new BytesRef(firstText)
+ " and\n"
+ new BytesRef(secondText)
);
fail("""
text differs: %s was [%s] but %s was [%s]. In utf8 those are
%s and
%s
""".formatted(first, firstText, second, secondText, new BytesRef(firstText), new BytesRef(secondText)));
}
// Now check the whole map just in case the text matches but something else differs
assertEquals(firstToken, secondToken);
@ -363,26 +338,10 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
previousSecond = secondToken;
}
if (secondTokens.hasNext()) {
fail(
second
+ " has more tokens than "
+ first
+ ". "
+ second
+ " has ["
+ secondTokens.next()
+ "] but "
+ first
+ " is out of tokens. "
+ first
+ "'s last token was ["
+ previousFirst
+ "] and "
+ second
+ "'s last token was' ["
+ previousSecond
+ "]"
);
fail("""
%s has more tokens than %s. %s has [%s] but %s is out of tokens. \
%s's last token was [%s] and %s's last token was [%s]
""".formatted(second, first, second, secondTokens.next(), first, first, previousFirst, second, previousSecond));
}
}
}

View file

@ -654,9 +654,10 @@ public class GrokTests extends ESTestCase {
Tuple<Map.Entry<String, GrokCaptureType>, Object> verb,
List<Tuple<Map.Entry<String, GrokCaptureType>, Object>> additionalFields
) {
String logLine = "31.184.238.164 - - [24/Jul/2014:05:35:37 +0530] \"GET /logs/access.log HTTP/1.0\" 200 69849 "
+ "\"http://8rursodiol.enjin.com\" \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/30.0.1599.12785 YaBrowser/13.12.1599.12785 Safari/537.36\" \"www.dlwindianrailways.com\"";
String logLine = """
31.184.238.164 - - [24/Jul/2014:05:35:37 +0530] "GET /logs/access.log HTTP/1.0" 200 69849 "http://8rursodiol.enjin.com" \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.12785 YaBrowser/13.12.1599.12785 \
Safari/537.36" "www.dlwindianrailways.com\"""";
Grok grok = new Grok(Grok.getBuiltinPatterns(ecsCompatibility), "%{COMBINEDAPACHELOG}", logger::warn);
Map<String, GrokCaptureType> captureTypes = new HashMap<>();
@ -739,12 +740,13 @@ public class GrokTests extends ESTestCase {
bank.put("DATA", ".*?");
bank.put("QS", "(?>(?<!\\\\)(?>\"(?>\\\\.|[^\\\\\"]+)+\"|\"\"|(?>'(?>\\\\.|[^\\\\']+)+')|''|(?>`(?>\\\\.|[^\\\\`]+)+`)|``))");
String text = "83.149.9.216 - - [19/Jul/2015:08:13:42 +0000] \"GET /presentations/logstash-monitorama-2013/images/"
+ "kibana-dashboard3.png HTTP/1.1\" 200 171717 \"http://semicomplete.com/presentations/logstash-monitorama-2013/\" "
+ "\"Mozilla"
+ "/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\"";
String pattern = "%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \\[%{HTTPDATE:timestamp}\\] \"%{WORD:verb} %{DATA:request} "
+ "HTTP/%{NUMBER:httpversion}\" %{NUMBER:response:int} (?:-|%{NUMBER:bytes:int}) %{QS:referrer} %{QS:agent}";
String text = """
83.149.9.216 - - [19/Jul/2015:08:13:42 +0000] "GET /presentations/logstash-monitorama-2013/images/kibana-dashboard3.png \
HTTP/1.1" 200 171717 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac \
OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36\"""";
String pattern = """
%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \\[%{HTTPDATE:timestamp}\\] "%{WORD:verb} %{DATA:request} \
HTTP/%{NUMBER:httpversion}" %{NUMBER:response:int} (?:-|%{NUMBER:bytes:int}) %{QS:referrer} %{QS:agent}""";
Grok grok = new Grok(bank, pattern, logger::warn);
assertCaptureConfig(

View file

@ -159,7 +159,13 @@ public class ConstructingObjectParserTests extends ESTestCase {
JsonXContent.jsonXContent,
// The following JSON needs to include newlines, in order to affect the line numbers
// included in the exception
"{\n" + " \"animal\": \"cat\",\n" + " \"vegetable\": 2,\n" + " \"a\": \"supercalifragilisticexpialidocious\"\n" + "}"
"""
{
"animal": "cat",
"vegetable": 2,
"a": "supercalifragilisticexpialidocious"
}
"""
);
XContentParseException e = expectThrows(
XContentParseException.class,
@ -178,7 +184,13 @@ public class ConstructingObjectParserTests extends ESTestCase {
JsonXContent.jsonXContent,
// The following JSON needs to include newlines, in order to affect the line numbers
// included in the exception
"{\n" + " \"a\": \"supercalifragilisticexpialidocious\",\n" + " \"animal\": \"cat\"\n," + " \"vegetable\": 2\n" + "}"
"""
{
"a": "supercalifragilisticexpialidocious",
"animal": "cat",
"vegetable": 2
}
"""
);
XContentParseException e = expectThrows(
XContentParseException.class,
@ -264,7 +276,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testIgnoreUnknownFields() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"test\" : \"foo\", \"junk\" : 2 }");
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{ "test" : "foo", "junk" : 2 }""");
class TestStruct {
public final String test;
@ -283,7 +296,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testConstructObjectUsingContext() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"animal\": \"dropbear\", \"mineral\": -8 }");
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{ "animal": "dropbear", "mineral": -8 }""");
HasCtorArguments parsed = HasCtorArguments.PARSER_INT_CONTEXT.apply(parser, 42);
assertEquals(Integer.valueOf(42), parsed.vegetable);
assertEquals("dropbear", parsed.animal);
@ -423,10 +437,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectInOrder() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"named\": [ {\"a\": {}} ], \"named_in_constructor\": [ {\"b\": {}} ]}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}} ], "named_in_constructor": [ {"b": {}} ]}""");
NamedObjectHolder h = NamedObjectHolder.PARSER.apply(parser, null);
assertThat(h.named, hasSize(1));
assertEquals("a", h.named.get(0).name);
@ -436,10 +448,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectTwoFieldsInArray() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"named\": [ {\"a\": {}, \"b\": {}}], \"named_in_constructor\": [ {\"c\": {}} ]}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}, "b": {}}], "named_in_constructor": [ {"c": {}} ]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
assertThat(
@ -452,10 +462,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectTwoFieldsInArrayConstructorArg() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"named\": [ {\"a\": {}}], \"named_in_constructor\": [ {\"c\": {}, \"d\": {}} ]}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}}], "named_in_constructor": [ {"c": {}, "d": {}} ]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
assertThat(
@ -494,10 +502,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectJunkInArray() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"named\": [ \"junk\" ], \"named_in_constructor\": [ {\"a\": {}} ]}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ "junk" ], "named_in_constructor": [ {"a": {}} ]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
assertThat(
@ -510,10 +516,8 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectJunkInArrayConstructorArg() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"named\": [ {\"a\": {}} ], \"named_in_constructor\": [ \"junk\" ]}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}} ], "named_in_constructor": [ "junk" ]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
assertThat(
@ -526,10 +530,13 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectInOrderNotSupported() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"named\": [\n" + " {\"a\": {}}" + "],\"named_in_constructor\": {\"b\": {}}" + "}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{
"named": [ { "a": {} } ],
"named_in_constructor": {
"b": {}
}
}""");
// Create our own parser for this test so we can disable support for the "ordered" mode specified by the array above
@SuppressWarnings("unchecked")
@ -551,10 +558,13 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectInOrderNotSupportedConstructorArg() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"named\": {\"a\": {}}, \"named_in_constructor\": [ {\"b\": {}} ]}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{
"named": {
"a": {}
},
"named_in_constructor": [ { "b": {} } ]
}""");
// Create our own parser for this test so we can disable support for the "ordered" mode specified by the array above
@SuppressWarnings("unchecked")
@ -800,21 +810,15 @@ public class ConstructingObjectParserTests extends ESTestCase {
public void testRemovalOfField() throws IOException {
{
// old_name with NO compatibility is resulting in an exception
XContentParser parser = createParserWithCompatibilityFor(
JsonXContent.jsonXContent,
"{\"old_name\": 1, \"second_field\": \"someString\"}",
RestApiVersion.current()
);
XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, """
{"old_name": 1, "second_field": "someString"}""", RestApiVersion.current());
expectThrows(XContentParseException.class, () -> StructRemovalField.PARSER.parse(parser, null));
}
{
// old_name with compatibility is still parsed, but ignored and results in a warning
XContentParser parser = createParserWithCompatibilityFor(
JsonXContent.jsonXContent,
"{\"old_name\": 1, \"second_field\": \"someString\"}",
RestApiVersion.minimumSupported()
);
XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, """
{"old_name": 1, "second_field": "someString"}""", RestApiVersion.minimumSupported());
StructRemovalField parse = StructRemovalField.PARSER.parse(parser, null);
assertCriticalWarnings("The field old_name has been removed and is being ignored");

View file

@ -26,34 +26,36 @@ public class DotExpandingXContentParserTests extends ESTestCase {
public void testEmbeddedObject() throws IOException {
assertXContentMatches(
"{\"test\":{\"with\":{\"dots\":{\"field\":\"value\"}}},\"nodots\":\"value2\"}",
"{\"test.with.dots\":{\"field\":\"value\"},\"nodots\":\"value2\"}"
);
assertXContentMatches("""
{"test":{"with":{"dots":{"field":"value"}}},"nodots":"value2"}\
""", """
{"test.with.dots":{"field":"value"},"nodots":"value2"}\
""");
}
public void testEmbeddedArray() throws IOException {
assertXContentMatches(
"{\"test\":{\"with\":{\"dots\":[\"field\",\"value\"]}},\"nodots\":\"value2\"}",
"{\"test.with.dots\":[\"field\",\"value\"],\"nodots\":\"value2\"}"
);
assertXContentMatches("""
{"test":{"with":{"dots":["field","value"]}},"nodots":"value2"}\
""", """
{"test.with.dots":["field","value"],"nodots":"value2"}\
""");
}
public void testEmbeddedValue() throws IOException {
assertXContentMatches(
"{\"test\":{\"with\":{\"dots\":\"value\"}},\"nodots\":\"value2\"}",
"{\"test.with.dots\":\"value\",\"nodots\":\"value2\"}"
);
assertXContentMatches("""
{"test":{"with":{"dots":"value"}},"nodots":"value2"}\
""", """
{"test.with.dots":"value","nodots":"value2"}\
""");
}
public void testSkipChildren() throws IOException {
XContentParser parser = DotExpandingXContentParser.expandDots(
createParser(JsonXContent.jsonXContent, "{ \"test.with.dots\" : \"value\", \"nodots\" : \"value2\" }")
);
XContentParser parser = DotExpandingXContentParser.expandDots(createParser(JsonXContent.jsonXContent, """
{ "test.with.dots" : "value", "nodots" : "value2" }"""));
parser.nextToken(); // start object
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
@ -76,9 +78,10 @@ public class DotExpandingXContentParserTests extends ESTestCase {
}
public void testNestedExpansions() throws IOException {
assertXContentMatches(
"{\"first\":{\"dot\":{\"second\":{\"dot\":\"value\"},\"third\":\"value\"}},\"nodots\":\"value\"}",
"{\"first.dot\":{\"second.dot\":\"value\",\"third\":\"value\"},\"nodots\":\"value\"}"
);
assertXContentMatches("""
{"first":{"dot":{"second":{"dot":"value"},"third":"value"}},"nodots":"value"}\
""", """
{"first.dot":{"second.dot":"value","third":"value"},"nodots":"value"}\
""");
}
}

View file

@ -201,7 +201,9 @@ public class InstantiatingObjectParserTests extends ESTestCase {
builder.declareString(constructorArg(), new ParseField("b"));
builder.declareString(constructorArg(), new ParseField("c"));
InstantiatingObjectParser<Annotations, Void> parser = builder.build();
try (XContentParser contentParser = createParser(JsonXContent.jsonXContent, "{\"a\": 5, \"b\":\"6\", \"c\": \"7\"}")) {
try (XContentParser contentParser = createParser(JsonXContent.jsonXContent, """
{"a": 5, "b":"6", "c": "7"}
""")) {
assertThat(parser.parse(contentParser, null), equalTo(new Annotations(5, "6", 7)));
}
}
@ -296,7 +298,9 @@ public class InstantiatingObjectParserTests extends ESTestCase {
builder.declareString(constructorArg(), new ParseField("b"));
builder.declareString(constructorArg(), new ParseField("c"));
InstantiatingObjectParser<ContextArgument, String> parser = builder.build();
try (XContentParser contentParser = createParser(JsonXContent.jsonXContent, "{\"a\": 5, \"b\":\"6\", \"c\": \"7\"}")) {
try (XContentParser contentParser = createParser(JsonXContent.jsonXContent, """
{"a": 5, "b":"6", "c": "7"}
""")) {
assertThat(parser.parse(contentParser, "context"), equalTo(new ContextArgument("context", 5, "6", 7)));
}
}

View file

@ -188,10 +188,8 @@ public class ObjectParserTests extends ESTestCase {
}
}
}
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"url\" : { \"host\": \"http://foobar\", \"port\" : 80}, \"name\" : \"foobarbaz\"}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"url" : { "host": "http://foobar", "port" : 80}, "name" : "foobarbaz"}""");
ObjectParser<Foo, CustomParseContext> objectParser = new ObjectParser<>("foo");
objectParser.declareString(Foo::setName, new ParseField("name"));
objectParser.declareObjectOrDefault(Foo::setUri, (p, s) -> s.parseURI(p), () -> null, new ParseField("url"));
@ -533,7 +531,15 @@ public class ObjectParserTests extends ESTestCase {
}
public void testParseNamedObject() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": { \"a\": {\"foo\" : 11} }, \"bar\": \"baz\"}");
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{
"named": {
"a": {
"foo": 11
}
},
"bar": "baz"
}""");
NamedObjectHolder h = NamedObjectHolder.PARSER.apply(parser, null);
assertEquals("a", h.named.name);
assertEquals(11, h.named.foo);
@ -541,7 +547,8 @@ public class ObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectUnexpectedArray() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ \"a\": {\"foo\" : 11} }]");
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ "a": {"foo" : 11} }]""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] named doesn't support values of type: START_ARRAY"));
}
@ -563,7 +570,8 @@ public class ObjectParserTests extends ESTestCase {
}
public void testParseNamedObjectsTwoFieldsInArray() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ {\"a\": {}, \"b\": {}}]}");
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}, "b": {}}]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectsHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_objects_holder] failed to parse field [named]"));
assertThat(
@ -755,13 +763,14 @@ public class ObjectParserTests extends ESTestCase {
assertEquals("i", parser.parse(createParser(JsonXContent.jsonXContent, "{\"body\": \"i\"}"), null).get());
Exception garbageException = expectThrows(
IllegalStateException.class,
() -> parser.parse(createParser(JsonXContent.jsonXContent, "{\"noop\": {\"garbage\": \"shouldn't\"}}"), null)
() -> parser.parse(createParser(JsonXContent.jsonXContent, """
{"noop": {"garbage": "shouldn't"}}
"""), null)
);
assertEquals("parser for [noop] did not end on END_OBJECT", garbageException.getMessage());
Exception sneakyException = expectThrows(
IllegalStateException.class,
() -> parser.parse(createParser(JsonXContent.jsonXContent, "{\"noop\": {\"body\": \"shouldn't\"}}"), null)
);
Exception sneakyException = expectThrows(IllegalStateException.class, () -> parser.parse(createParser(JsonXContent.jsonXContent, """
{"noop": {"body": "shouldn't"}}
"""), null));
assertEquals("parser for [noop] did not end on END_OBJECT", sneakyException.getMessage());
}
@ -785,12 +794,16 @@ public class ObjectParserTests extends ESTestCase {
XContentParseException garbageError = expectThrows(
XContentParseException.class,
() -> parser.parse(createParser(JsonXContent.jsonXContent, "{\"noop\": [{\"garbage\": \"shouldn't\"}}]"), null)
() -> parser.parse(createParser(JsonXContent.jsonXContent, """
{"noop": [{"garbage": "shouldn't"}}]
"""), null)
);
assertEquals("expected value but got [FIELD_NAME]", garbageError.getCause().getMessage());
XContentParseException sneakyError = expectThrows(
XContentParseException.class,
() -> parser.parse(createParser(JsonXContent.jsonXContent, "{\"noop\": [{\"body\": \"shouldn't\"}}]"), null)
() -> parser.parse(createParser(JsonXContent.jsonXContent, """
{"noop": [{"body": "shouldn't"}}]
"""), null)
);
assertEquals("expected value but got [FIELD_NAME]", sneakyError.getCause().getMessage());
}
@ -879,18 +892,16 @@ public class ObjectParserTests extends ESTestCase {
}
public void testConsumeUnknownFields() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\n"
+ " \"test\" : \"foo\",\n"
+ " \"test_number\" : 2,\n"
+ " \"name\" : \"geoff\",\n"
+ " \"test_boolean\" : true,\n"
+ " \"test_null\" : null,\n"
+ " \"test_array\": [1,2,3,4],\n"
+ " \"test_nested\": { \"field\" : \"value\", \"field2\" : [ \"list1\", \"list2\" ] }\n"
+ "}"
);
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{
"test" : "foo",
"test_number" : 2,
"name" : "geoff",
"test_boolean" : true,
"test_null" : null,
"test_array": [1,2,3,4],
"test_nested": { "field" : "value", "field2" : [ "list1", "list2" ] }
}""");
ObjectParser<ObjectWithArbitraryFields, Void> op = new ObjectParser<>(
"unknown",
ObjectWithArbitraryFields::setField,
@ -943,7 +954,8 @@ public class ObjectParserTests extends ESTestCase {
assertThat(obj.a, nullValue());
assertThat(obj.b, equalTo(123L));
parser = createParser(JsonXContent.jsonXContent, "{\"a\": \"123\", \"b\": \"456\"}");
parser = createParser(JsonXContent.jsonXContent, """
{"a": "123", "b": "456"}""");
objectParser = new ObjectParser<>("foo", true, TestStruct::new);
objectParser.declareLong(TestStruct::setA, new ParseField("a"));
objectParser.declareLong(TestStruct::setB, new ParseField("b"));

View file

@ -130,10 +130,12 @@ public class XContentParserTests extends ESTestCase {
}
public void testReadMapStrings() throws IOException {
Map<String, String> map = readMapStrings("{\"foo\": {\"kbar\":\"vbar\"}}");
Map<String, String> map = readMapStrings("""
{"foo": {"kbar":"vbar"}}""");
assertThat(map.get("kbar"), equalTo("vbar"));
assertThat(map.size(), equalTo(1));
map = readMapStrings("{\"foo\": {\"kbar\":\"vbar\", \"kbaz\":\"vbaz\"}}");
map = readMapStrings("""
{"foo": {"kbar":"vbar", "kbaz":"vbaz"}}""");
assertThat(map.get("kbar"), equalTo("vbar"));
assertThat(map.get("kbaz"), equalTo("vbaz"));
assertThat(map.size(), equalTo(2));
@ -142,8 +144,26 @@ public class XContentParserTests extends ESTestCase {
}
public void testMap() throws IOException {
String source = "{\"i\": {\"_doc\": {\"f1\": {\"type\": \"text\", \"analyzer\": \"english\"}, "
+ "\"f2\": {\"type\": \"object\", \"properties\": {\"sub1\": {\"type\": \"keyword\", \"foo\": 17}}}}}}";
String source = """
{
"i": {
"_doc": {
"f1": {
"type": "text",
"analyzer": "english"
},
"f2": {
"type": "object",
"properties": {
"sub1": {
"type": "keyword",
"foo": 17
}
}
}
}
}
}""";
Map<String, Object> f1 = new HashMap<>();
f1.put("type", "text");
f1.put("analyzer", "english");
@ -339,11 +359,24 @@ public class XContentParserTests extends ESTestCase {
}
public void testGenericMap() throws IOException {
String content = "{"
+ "\"c\": { \"i\": 3, \"d\": 0.3, \"s\": \"ccc\" }, "
+ "\"a\": { \"i\": 1, \"d\": 0.1, \"s\": \"aaa\" }, "
+ "\"b\": { \"i\": 2, \"d\": 0.2, \"s\": \"bbb\" }"
+ "}";
String content = """
{
"c": {
"i": 3,
"d": 0.3,
"s": "ccc"
},
"a": {
"i": 1,
"d": 0.1,
"s": "aaa"
},
"b": {
"i": 2,
"d": 0.2,
"s": "bbb"
}
}""";
SimpleStruct structA = new SimpleStruct(1, 0.1, "aaa");
SimpleStruct structB = new SimpleStruct(2, 0.2, "bbb");
SimpleStruct structC = new SimpleStruct(3, 0.3, "ccc");
@ -357,11 +390,24 @@ public class XContentParserTests extends ESTestCase {
}
public void testGenericMapOrdered() throws IOException {
String content = "{"
+ "\"c\": { \"i\": 3, \"d\": 0.3, \"s\": \"ccc\" }, "
+ "\"a\": { \"i\": 1, \"d\": 0.1, \"s\": \"aaa\" }, "
+ "\"b\": { \"i\": 2, \"d\": 0.2, \"s\": \"bbb\" }"
+ "}";
String content = """
{
"c": {
"i": 3,
"d": 0.3,
"s": "ccc"
},
"a": {
"i": 1,
"d": 0.1,
"s": "aaa"
},
"b": {
"i": 2,
"d": 0.2,
"s": "bbb"
}
}""";
SimpleStruct structA = new SimpleStruct(1, 0.1, "aaa");
SimpleStruct structB = new SimpleStruct(2, 0.2, "bbb");
SimpleStruct structC = new SimpleStruct(3, 0.3, "ccc");
@ -376,11 +422,24 @@ public class XContentParserTests extends ESTestCase {
}
public void testGenericMap_Failure_MapContainingUnparsableValue() throws IOException {
String content = "{"
+ "\"a\": { \"i\": 1, \"d\": 0.1, \"s\": \"aaa\" }, "
+ "\"b\": { \"i\": 2, \"d\": 0.2, \"s\": 666 }, "
+ "\"c\": { \"i\": 3, \"d\": 0.3, \"s\": \"ccc\" }"
+ "}";
String content = """
{
"a": {
"i": 1,
"d": 0.1,
"s": "aaa"
},
"b": {
"i": 2,
"d": 0.2,
"s": 666
},
"c": {
"i": 3,
"d": 0.3,
"s": "ccc"
}
}""";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) {
XContentParseException exception = expectThrows(
XContentParseException.class,

View file

@ -28,7 +28,10 @@ public class ESSolrSynonymParserTests extends ESTokenStreamTestCase {
public void testLenientParser() throws IOException, ParseException {
ESSolrSynonymParser parser = new ESSolrSynonymParser(true, false, true, new StandardAnalyzer());
String rules = "&,and\n" + "come,advance,approach\n";
String rules = """
&,and
come,advance,approach
""";
StringReader rulesReader = new StringReader(rules);
parser.parse(rulesReader);
SynonymMap synonymMap = parser.build();
@ -54,7 +57,10 @@ public class ESSolrSynonymParserTests extends ESTokenStreamTestCase {
public void testNonLenientParser() {
ESSolrSynonymParser parser = new ESSolrSynonymParser(true, false, false, new StandardAnalyzer());
String rules = "&,and=>and\n" + "come,advance,approach\n";
String rules = """
&,and=>and
come,advance,approach
""";
StringReader rulesReader = new StringReader(rules);
ParseException ex = expectThrows(ParseException.class, () -> parser.parse(rulesReader));
assertThat(ex.getMessage(), containsString("Invalid synonym rule at line 1"));

View file

@ -28,11 +28,12 @@ public class ESWordnetSynonymParserTests extends ESTokenStreamTestCase {
public void testLenientParser() throws IOException, ParseException {
ESWordnetSynonymParser parser = new ESWordnetSynonymParser(true, false, true, new StandardAnalyzer());
String rules = "s(100000001,1,'&',a,1,0).\n"
+ "s(100000001,2,'and',a,1,0).\n"
+ "s(100000002,1,'come',v,1,0).\n"
+ "s(100000002,2,'advance',v,1,0).\n"
+ "s(100000002,3,'approach',v,1,0).";
String rules = """
s(100000001,1,'&',a,1,0).
s(100000001,2,'and',a,1,0).
s(100000002,1,'come',v,1,0).
s(100000002,2,'advance',v,1,0).
s(100000002,3,'approach',v,1,0).""";
StringReader rulesReader = new StringReader(rules);
parser.parse(rulesReader);
SynonymMap synonymMap = parser.build();
@ -46,7 +47,10 @@ public class ESWordnetSynonymParserTests extends ESTokenStreamTestCase {
CharArraySet stopSet = new CharArraySet(1, true);
stopSet.add("bar");
ESWordnetSynonymParser parser = new ESWordnetSynonymParser(true, false, true, new StandardAnalyzer(stopSet));
String rules = "s(100000001,1,'foo',v,1,0).\n" + "s(100000001,2,'bar',v,1,0).\n" + "s(100000001,3,'baz',v,1,0).";
String rules = """
s(100000001,1,'foo',v,1,0).
s(100000001,2,'bar',v,1,0).
s(100000001,3,'baz',v,1,0).""";
StringReader rulesReader = new StringReader(rules);
parser.parse(rulesReader);
SynonymMap synonymMap = parser.build();
@ -58,11 +62,12 @@ public class ESWordnetSynonymParserTests extends ESTokenStreamTestCase {
public void testNonLenientParser() {
ESWordnetSynonymParser parser = new ESWordnetSynonymParser(true, false, false, new StandardAnalyzer());
String rules = "s(100000001,1,'&',a,1,0).\n"
+ "s(100000001,2,'and',a,1,0).\n"
+ "s(100000002,1,'come',v,1,0).\n"
+ "s(100000002,2,'advance',v,1,0).\n"
+ "s(100000002,3,'approach',v,1,0).";
String rules = """
s(100000001,1,'&',a,1,0).
s(100000001,2,'and',a,1,0).
s(100000002,1,'come',v,1,0).
s(100000002,2,'advance',v,1,0).
s(100000002,3,'approach',v,1,0).""";
StringReader rulesReader = new StringReader(rules);
ParseException ex = expectThrows(ParseException.class, () -> parser.parse(rulesReader));
assertThat(ex.getMessage(), containsString("Invalid synonym rule at line 1"));

View file

@ -61,29 +61,27 @@ public class IngestRestartIT extends ESIntegTestCase {
internalCluster().ensureAtLeastNumDataNodes(1);
internalCluster().startMasterOnlyNode();
final String pipelineId = "foo";
client().admin()
.cluster()
.preparePutPipeline(
pipelineId,
new BytesArray(
"{\n"
+ " \"processors\" : [\n"
+ " {\"set\" : {\"field\": \"any_field\", \"value\": \"any_value\"}},\n"
+ " {\"set\" : {"
+ ""
+ " \"if\" : "
+ "{\"lang\": \""
+ MockScriptEngine.NAME
+ "\", \"source\": \"throwing_script\"},"
+ " \"field\": \"any_field2\","
+ " \"value\": \"any_value2\"}"
+ " }\n"
+ " ]\n"
+ "}"
),
XContentType.JSON
)
.get();
client().admin().cluster().preparePutPipeline(pipelineId, new BytesArray("""
{
"processors": [
{
"set": {
"field": "any_field",
"value": "any_value"
}
},
{
"set": {
"if": {
"lang": "%s",
"source": "throwing_script"
},
"field": "any_field2",
"value": "any_value2"
}
}
]
}""".formatted(MockScriptEngine.NAME)), XContentType.JSON).get();
Exception e = expectThrows(
Exception.class,
@ -111,18 +109,14 @@ public class IngestRestartIT extends ESIntegTestCase {
String pipelineIdWithScript = pipelineIdWithoutScript + "_script";
internalCluster().startNode();
BytesReference pipelineWithScript = new BytesArray(
"{\n"
+ " \"processors\" : [\n"
+ " {\"script\" : {\"lang\": \""
+ MockScriptEngine.NAME
+ "\", \"source\": \"my_script\"}}\n"
+ " ]\n"
+ "}"
);
BytesReference pipelineWithoutScript = new BytesArray(
"{\n" + " \"processors\" : [\n" + " {\"set\" : {\"field\": \"y\", \"value\": 0}}\n" + " ]\n" + "}"
);
BytesReference pipelineWithScript = new BytesArray("""
{
"processors": [ { "script": { "lang": "%s", "source": "my_script" } } ]
}""".formatted(MockScriptEngine.NAME));
BytesReference pipelineWithoutScript = new BytesArray("""
{
"processors": [ { "set": { "field": "y", "value": 0 } } ]
}""");
Consumer<String> checkPipelineExists = (id) -> assertThat(
client().admin().cluster().prepareGetPipeline(id).get().pipelines().get(0).getId(),
@ -185,23 +179,16 @@ public class IngestRestartIT extends ESIntegTestCase {
public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exception {
internalCluster().startNode();
client().admin()
.cluster()
.preparePutStoredScript()
.setId("1")
.setContent(
new BytesArray("{\"script\": {\"lang\": \"" + MockScriptEngine.NAME + "\", \"source\": \"my_script\"} }"),
XContentType.JSON
)
.get();
BytesReference pipeline = new BytesArray(
"{\n"
+ " \"processors\" : [\n"
+ " {\"set\" : {\"field\": \"y\", \"value\": 0}},\n"
+ " {\"script\" : {\"id\": \"1\"}}\n"
+ " ]\n"
+ "}"
);
client().admin().cluster().preparePutStoredScript().setId("1").setContent(new BytesArray("""
{"script": {"lang": "%s", "source": "my_script"} }
""".formatted(MockScriptEngine.NAME)), XContentType.JSON).get();
BytesReference pipeline = new BytesArray("""
{
"processors" : [
{"set" : {"field": "y", "value": 0}},
{"script" : {"id": "1"}}
]
}""");
client().admin().cluster().preparePutPipeline("_id", pipeline, XContentType.JSON).get();
client().prepareIndex("index")
@ -240,9 +227,12 @@ public class IngestRestartIT extends ESIntegTestCase {
String node = internalCluster().startNode();
String ingestNode = internalCluster().startNode(onlyRole(DiscoveryNodeRole.INGEST_ROLE));
BytesReference pipeline = new BytesArray(
"{\n" + " \"processors\" : [\n" + " {\"set\" : {\"field\": \"y\", \"value\": 0}}\n" + " ]\n" + "}"
);
BytesReference pipeline = new BytesArray("""
{
"processors" : [
{"set" : {"field": "y", "value": 0}}
]
}""");
client().admin().cluster().preparePutPipeline("_id", pipeline, XContentType.JSON).get();
client().prepareIndex("index")

View file

@ -192,7 +192,8 @@ public class JsonProcessorTests extends ESTestCase {
JsonProcessor jsonProcessor = new JsonProcessor(processorTag, null, "json", null, true, MERGE, false);
Map<String, Object> document = new HashMap<>();
String json = "{\"foo\": {\"bar\": \"baz\"}}";
String json = """
{"foo": {"bar": "baz"}}""";
document.put("json", json);
Map<String, Object> inner = new HashMap<>();
inner.put("bar", "override_me");
@ -211,7 +212,8 @@ public class JsonProcessorTests extends ESTestCase {
JsonProcessor jsonProcessor = new JsonProcessor(processorTag, null, "json", null, true, REPLACE, false);
Map<String, Object> document = new HashMap<>();
String json = "{\"foo\": {\"bar\": \"baz\"}}";
String json = """
{"foo": {"bar": "baz"}}""";
document.put("json", json);
Map<String, Object> inner = new HashMap<>();
inner.put("bar", "override_me");

View file

@ -30,8 +30,13 @@ import static org.hamcrest.Matchers.nullValue;
public class UpdateDatabasesIT extends ESRestTestCase {
public void test() throws Exception {
String body = "{\"pipeline\":{\"processors\":[{\"geoip\":{\"field\":\"ip\"}}]},"
+ "\"docs\":[{\"_index\":\"index\",\"_id\":\"id\",\"_source\":{\"ip\":\"89.160.20.128\"}}]}";
String body = """
{
"pipeline": {
"processors": [ { "geoip": { "field": "ip" } } ]
},
"docs": [ { "_index": "index", "_id": "id", "_source": { "ip": "89.160.20.128" } } ]
}""";
Request simulatePipelineRequest = new Request("POST", "/_ingest/pipeline/_simulate");
simulatePipelineRequest.setJsonEntity(body);
{

View file

@ -69,14 +69,20 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
public void testBulkToKibanaIndex() throws IOException {
Request request = request("POST", "/_bulk");
request.setJsonEntity("{ \"index\" : { \"_index\" : \"" + indexName + "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n");
request.setJsonEntity("""
{ "index" : { "_index" : "%s", "_id" : "1" } }
{ "foo" : "bar" }
""".formatted(indexName));
Response response = client().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), is(200));
}
public void testRefresh() throws IOException {
Request request = request("POST", "/_bulk");
request.setJsonEntity("{ \"index\" : { \"_index\" : \"" + indexName + "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n");
request.setJsonEntity("""
{ "index" : { "_index" : "%s", "_id" : "1" } }
{ "foo" : "bar" }
""".formatted(indexName));
Response response = client().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), is(200));
@ -94,7 +100,10 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
public void testGetFromKibanaIndex() throws IOException {
Request request = request("POST", "/_bulk");
request.setJsonEntity("{ \"index\" : { \"_index\" : \"" + indexName + "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n");
request.setJsonEntity("""
{ "index" : { "_index" : "%s", "_id" : "1" } }
{ "foo" : "bar" }
""".formatted(indexName));
request.addParameter("refresh", "true");
Response response = client().performRequest(request);
@ -110,28 +119,31 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
public void testMultiGetFromKibanaIndex() throws IOException {
Request request = request("POST", "/_bulk");
request.setJsonEntity(
"{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n"
+ "{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"2\" } }\n{ \"baz\" : \"tag\" }\n"
);
request.setJsonEntity("""
{ "index" : { "_index" : "%s", "_id" : "1" } }
{ "foo" : "bar" }
{ "index" : { "_index" : "%s", "_id" : "2" } }
{ "baz" : "tag" }
""".formatted(indexName, indexName));
request.addParameter("refresh", "true");
Response response = client().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), is(200));
Request getRequest = request("GET", "/_mget");
getRequest.setJsonEntity(
"{ \"docs\" : [ { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"1\" }, "
+ "{ \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"2\" } ] }\n"
);
getRequest.setJsonEntity("""
{
"docs": [
{
"_index": "%s",
"_id": "1"
},
{
"_index": "%s",
"_id": "2"
}
]
}""".formatted(indexName, indexName));
Response getResponse = client().performRequest(getRequest);
assertThat(getResponse.getStatusLine().getStatusCode(), is(200));
String responseBody = EntityUtils.toString(getResponse.getEntity());
@ -143,21 +155,21 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
public void testSearchFromKibanaIndex() throws IOException {
Request request = request("POST", "/_bulk");
request.setJsonEntity(
"{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n"
+ "{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"2\" } }\n{ \"baz\" : \"tag\" }\n"
);
request.setJsonEntity("""
{ "index" : { "_index" : "%s", "_id" : "1" } }
{ "foo" : "bar" }
{ "index" : { "_index" : "%s", "_id" : "2" } }
{ "baz" : "tag" }
""".formatted(indexName, indexName));
request.addParameter("refresh", "true");
Response response = client().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), is(200));
Request searchRequest = request("GET", "/" + indexName + "/_search");
searchRequest.setJsonEntity("{ \"query\" : { \"match_all\" : {} } }\n");
searchRequest.setJsonEntity("""
{ "query" : { "match_all" : {} } }
""");
Response getResponse = client().performRequest(searchRequest);
assertThat(getResponse.getStatusLine().getStatusCode(), is(200));
String responseBody = EntityUtils.toString(getResponse.getEntity());
@ -169,14 +181,12 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
public void testDeleteFromKibanaIndex() throws IOException {
Request request = request("POST", "/_bulk");
request.setJsonEntity(
"{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n"
+ "{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"2\" } }\n{ \"baz\" : \"tag\" }\n"
);
request.setJsonEntity("""
{ "index" : { "_index" : "%s", "_id" : "1" } }
{ "foo" : "bar" }
{ "index" : { "_index" : "%s", "_id" : "2" } }
{ "baz" : "tag" }
""".formatted(indexName, indexName));
request.addParameter("refresh", "true");
Response response = client().performRequest(request);
@ -189,21 +199,21 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
public void testDeleteByQueryFromKibanaIndex() throws IOException {
Request request = request("POST", "/_bulk");
request.setJsonEntity(
"{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n"
+ "{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"2\" } }\n{ \"baz\" : \"tag\" }\n"
);
request.setJsonEntity("""
{ "index" : { "_index" : "%s", "_id" : "1" } }
{ "foo" : "bar" }
{ "index" : { "_index" : "%s", "_id" : "2" } }
{ "baz" : "tag" }
""".formatted(indexName, indexName));
request.addParameter("refresh", "true");
Response response = client().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), is(200));
Request dbqRequest = request("POST", "/" + indexName + "/_delete_by_query");
dbqRequest.setJsonEntity("{ \"query\" : { \"match_all\" : {} } }\n");
dbqRequest.setJsonEntity("""
{ "query" : { "match_all" : {} } }
""");
Response dbqResponse = client().performRequest(dbqRequest);
assertThat(dbqResponse.getStatusLine().getStatusCode(), is(200));
}
@ -279,23 +289,22 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
public void testScrollingDocs() throws IOException {
Request request = request("POST", "/_bulk");
request.setJsonEntity(
"{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n"
+ "{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"2\" } }\n{ \"baz\" : \"tag\" }\n"
+ "{ \"index\" : { \"_index\" : \""
+ indexName
+ "\", \"_id\" : \"3\" } }\n{ \"baz\" : \"tag\" }\n"
);
request.setJsonEntity("""
{ "index" : { "_index" : "%s", "_id" : "1" } }
{ "foo" : "bar" }
{ "index" : { "_index" : "%s", "_id" : "2" } }
{ "baz" : "tag" }
{ "index" : { "_index" : "%s", "_id" : "3" } }
{ "baz" : "tag" }
""".formatted(indexName, indexName, indexName));
request.addParameter("refresh", "true");
Response response = client().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), is(200));
Request searchRequest = request("GET", "/" + indexName + "/_search");
searchRequest.setJsonEntity("{ \"size\" : 1,\n\"query\" : { \"match_all\" : {} } }\n");
searchRequest.setJsonEntity("""
{ "size" : 1, "query" : { "match_all" : {} } }
""");
searchRequest.addParameter("scroll", "1m");
response = client().performRequest(searchRequest);
assertThat(response.getStatusLine().getStatusCode(), is(200));

View file

@ -39,12 +39,8 @@ public class StoredExpressionIT extends ESIntegTestCase {
}
public void testAllOpsDisabledIndexedScripts() throws IOException {
client().admin()
.cluster()
.preparePutStoredScript()
.setId("script1")
.setContent(new BytesArray("{\"script\": {\"lang\": \"expression\", \"source\": \"2\"} }"), XContentType.JSON)
.get();
client().admin().cluster().preparePutStoredScript().setId("script1").setContent(new BytesArray("""
{"script": {"lang": "expression", "source": "2"} }"""), XContentType.JSON).get();
client().prepareIndex("test").setId("1").setSource("{\"theField\":\"foo\"}", XContentType.JSON).get();
try {
client().prepareUpdate("test", "1").setScript(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())).get();

View file

@ -135,23 +135,23 @@ public class MultiSearchTemplateIT extends ESIntegTestCase {
SearchTemplateResponse searchTemplateResponse1 = response1.getResponse();
assertThat(searchTemplateResponse1.hasResponse(), is(true));
assertHitCount(searchTemplateResponse1.getResponse(), (numDocs / 2) + (numDocs % 2));
assertThat(searchTemplateResponse1.getSource().utf8ToString(), equalTo("{\"query\":{\"match\":{\"odd\":\"true\"}}}"));
assertThat(searchTemplateResponse1.getSource().utf8ToString(), equalTo("""
{"query":{"match":{"odd":"true"}}}"""));
MultiSearchTemplateResponse.Item response2 = response.getResponses()[1];
assertThat(response2.isFailure(), is(false));
SearchTemplateResponse searchTemplateResponse2 = response2.getResponse();
assertThat(searchTemplateResponse2.hasResponse(), is(false));
assertThat(
searchTemplateResponse2.getSource().utf8ToString(),
equalTo("{\"query\":{\"match_phrase_prefix\":{\"message\":\"quick brown f\"}}}")
);
assertThat(searchTemplateResponse2.getSource().utf8ToString(), equalTo("""
{"query":{"match_phrase_prefix":{"message":"quick brown f"}}}"""));
MultiSearchTemplateResponse.Item response3 = response.getResponses()[2];
assertThat(response3.isFailure(), is(false));
SearchTemplateResponse searchTemplateResponse3 = response3.getResponse();
assertThat(searchTemplateResponse3.hasResponse(), is(true));
assertHitCount(searchTemplateResponse3.getResponse(), (numDocs / 2));
assertThat(searchTemplateResponse3.getSource().utf8ToString(), equalTo("{\"query\":{\"term\":{\"odd\":\"false\"}}}"));
assertThat(searchTemplateResponse3.getSource().utf8ToString(), equalTo("""
{"query":{"term":{"odd":"false"}}}"""));
MultiSearchTemplateResponse.Item response4 = response.getResponses()[3];
assertThat(response4.isFailure(), is(true));

View file

@ -51,7 +51,8 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
// Relates to #6318
public void testSearchRequestFail() throws Exception {
String query = "{ \"query\": {\"match_all\": {}}, \"size\" : \"{{my_size}}\" }";
String query = """
{ "query": {"match_all": {}}, "size" : "{{my_size}}" }""";
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("_all");
@ -80,12 +81,13 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
public void testTemplateQueryAsEscapedString() throws Exception {
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("_all");
String query = "{"
+ " \"source\" : \"{ \\\"size\\\": \\\"{{size}}\\\", \\\"query\\\":{\\\"match_all\\\":{}}}\","
+ " \"params\":{"
+ " \"size\": 1"
+ " }"
+ "}";
String query = """
{
"source": "{ \\"size\\": \\"{{size}}\\", \\"query\\":{\\"match_all\\":{}}}",
"params": {
"size": 1
}
}""";
SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, query));
request.setRequest(searchRequest);
SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get();
@ -99,13 +101,14 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
public void testTemplateQueryAsEscapedStringStartingWithConditionalClause() throws Exception {
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("_all");
String templateString = "{"
+ " \"source\" : \"{ {{#use_size}} \\\"size\\\": \\\"{{size}}\\\", {{/use_size}} \\\"query\\\":{\\\"match_all\\\":{}}}\","
+ " \"params\":{"
+ " \"size\": 1,"
+ " \"use_size\": true"
+ " }"
+ "}";
String templateString = """
{
"source": "{ {{#use_size}} \\"size\\": \\"{{size}}\\", {{/use_size}} \\"query\\":{\\"match_all\\":{}}}",
"params": {
"size": 1,
"use_size": true
}
}""";
SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString));
request.setRequest(searchRequest);
SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get();
@ -119,13 +122,14 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
public void testTemplateQueryAsEscapedStringWithConditionalClauseAtEnd() throws Exception {
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("_all");
String templateString = "{"
+ " \"source\" : \"{ \\\"query\\\":{\\\"match_all\\\":{}} {{#use_size}}, \\\"size\\\": \\\"{{size}}\\\" {{/use_size}} }\","
+ " \"params\":{"
+ " \"size\": 1,"
+ " \"use_size\": true"
+ " }"
+ "}";
String templateString = """
{
"source": "{ \\"query\\":{\\"match_all\\":{}} {{#use_size}}, \\"size\\": \\"{{size}}\\" {{/use_size}} }",
"params": {
"size": 1,
"use_size": true
}
}""";
SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString));
request.setRequest(searchRequest);
SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get();
@ -133,29 +137,19 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
}
public void testIndexedTemplateClient() throws Exception {
assertAcked(
client().admin()
.cluster()
.preparePutStoredScript()
.setId("testTemplate")
.setContent(
new BytesArray(
"{"
+ " \"script\": {"
+ " \"lang\": \"mustache\","
+ " \"source\": {"
+ " \"query\": {"
+ " \"match\": {"
+ " \"theField\": \"{{fieldParam}}\""
+ " }"
+ " }"
+ " }"
+ " }"
+ "}"
),
XContentType.JSON
)
);
assertAcked(client().admin().cluster().preparePutStoredScript().setId("testTemplate").setContent(new BytesArray("""
{
"script": {
"lang": "mustache",
"source": {
"query": {
"match": {
"theField": "{{fieldParam}}"
}
}
}
}
}"""), XContentType.JSON));
GetStoredScriptResponse getResponse = client().admin().cluster().prepareGetStoredScript("testTemplate").get();
assertNotNull(getResponse.getSource());
@ -187,18 +181,20 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
public void testIndexedTemplate() throws Exception {
String script = "{"
+ " \"script\": {"
+ " \"lang\": \"mustache\","
+ " \"source\": {"
+ " \"query\": {"
+ " \"match\": {"
+ " \"theField\": \"{{fieldParam}}\""
+ " }"
+ " }"
+ " }"
+ " }"
+ "}";
String script = """
{
"script": {
"lang": "mustache",
"source": {
"query": {
"match": {
"theField": "{{fieldParam}}"
}
}
}
}
}
""";
assertAcked(client().admin().cluster().preparePutStoredScript().setId("1a").setContent(new BytesArray(script), XContentType.JSON));
assertAcked(client().admin().cluster().preparePutStoredScript().setId("2").setContent(new BytesArray(script), XContentType.JSON));
@ -250,21 +246,22 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
client().admin().indices().prepareRefresh().get();
int iterations = randomIntBetween(2, 11);
String query = "{"
+ " \"script\": {"
+ " \"lang\": \"mustache\","
+ " \"source\": {"
+ " \"query\": {"
+ " \"match_phrase_prefix\": {"
+ " \"searchtext\": {"
+ " \"query\": \"{{P_Keyword1}}\","
+ " \"slop\": {{slop}}"
+ " }"
+ " }"
+ " }"
+ " }"
+ " }"
+ "}";
String query = """
{
"script": {
"lang": "mustache",
"source": {
"query": {
"match_phrase_prefix": {
"searchtext": {
"query": "{{P_Keyword1}}",
"slop": "{{slop}}"
}
}
}
}
}
}""";
for (int i = 1; i < iterations; i++) {
assertAcked(
client().admin()
@ -308,22 +305,23 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
}
public void testIndexedTemplateWithArray() throws Exception {
String multiQuery = "{\n"
+ " \"script\": {\n"
+ " \"lang\": \"mustache\",\n"
+ " \"source\": {\n"
+ " \"query\": {\n"
+ " \"terms\": {\n"
+ " \"theField\": [\n"
+ " \"{{#fieldParam}}\",\n"
+ " \"{{.}}\",\n"
+ " \"{{/fieldParam}}\"\n"
+ " ]\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}";
String multiQuery = """
{
"script": {
"lang": "mustache",
"source": {
"query": {
"terms": {
"theField": [
"{{#fieldParam}}",
"{{.}}",
"{{/fieldParam}}"
]
}
}
}
}
}""";
assertAcked(
client().admin().cluster().preparePutStoredScript().setId("4").setContent(new BytesArray(multiQuery), XContentType.JSON)
);

View file

@ -66,8 +66,10 @@ public class MultiSearchTemplateRequestTests extends ESTestCase {
}
public void testParseWithCarriageReturn() throws Exception {
final String content = "{\"index\":[\"test0\", \"test1\"], \"request_cache\": true}\r\n"
+ "{\"source\": {\"query\" : {\"match_{{template}}\" :{}}}, \"params\": {\"template\": \"all\" } }\r\n";
final String content = """
{"index":["test0", "test1"], "request_cache": true}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
""";
RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(content), XContentType.JSON)
.build();
@ -104,7 +106,8 @@ public class MultiSearchTemplateRequestTests extends ESTestCase {
searchRequest.setBatchedReduceSize(SearchRequest.DEFAULT_BATCHED_REDUCE_SIZE);
SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest);
searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}");
searchTemplateRequest.setScript("""
{"query": { "match" : { "{{field}}" : "{{value}}" }}}""");
searchTemplateRequest.setScriptType(ScriptType.INLINE);
searchTemplateRequest.setProfile(randomBoolean());

View file

@ -40,40 +40,105 @@ public class MustacheScriptEngineTests extends ESTestCase {
public void testSimpleParameterReplace() {
Map<String, String> compileParams = Collections.singletonMap("content_type", "application/json");
{
String template = "GET _search {\"query\": "
+ "{\"boosting\": {"
+ "\"positive\": {\"match\": {\"body\": \"gift\"}},"
+ "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}"
+ "}}, \"negative_boost\": {{boost_val}} } }}";
String template = """
GET _search
{
"query": {
"boosting": {
"positive": {
"match": {
"body": "gift"
}
},
"negative": {
"term": {
"body": {
"value": "solr"
}
}
},
"negative_boost": {{boost_val}}
}
}
}""";
Map<String, Object> vars = new HashMap<>();
vars.put("boost_val", "0.3");
String o = qe.compile(null, template, TemplateScript.CONTEXT, compileParams).newInstance(vars).execute();
assertEquals(
"GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}},"
+ "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}",
o
);
assertEquals("""
GET _search
{
"query": {
"boosting": {
"positive": {
"match": {
"body": "gift"
}
},
"negative": {
"term": {
"body": {
"value": "solr"
}
}
},
"negative_boost": 0.3
}
}
}""", o);
}
{
String template = "GET _search {\"query\": "
+ "{\"boosting\": {"
+ "\"positive\": {\"match\": {\"body\": \"gift\"}},"
+ "\"negative\": {\"term\": {\"body\": {\"value\": \"{{body_val}}\"}"
+ "}}, \"negative_boost\": {{boost_val}} } }}";
String template = """
GET _search
{
"query": {
"boosting": {
"positive": {
"match": {
"body": "gift"
}
},
"negative": {
"term": {
"body": {
"value": "{{body_val}}"
}
}
},
"negative_boost": {{boost_val}}
}
}
}""";
Map<String, Object> vars = new HashMap<>();
vars.put("boost_val", "0.3");
vars.put("body_val", "\"quick brown\"");
String o = qe.compile(null, template, TemplateScript.CONTEXT, compileParams).newInstance(vars).execute();
assertEquals(
"GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}},"
+ "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}",
o
);
assertEquals("""
GET _search
{
"query": {
"boosting": {
"positive": {
"match": {
"body": "gift"
}
},
"negative": {
"term": {
"body": {
"value": "\\"quick brown\\""
}
}
},
"negative_boost": 0.3
}
}
}""", o);
}
}
public void testSimple() throws IOException {
String templateString = "{" + "\"source\":{\"match_{{template}}\": {}}," + "\"params\":{\"template\":\"all\"}" + "}";
String templateString = """
{"source":{"match_{{template}}": {}},"params":{"template":"all"}}""";
XContentParser parser = createParser(JsonXContent.jsonXContent, templateString);
Script script = Script.parse(parser);
TemplateScript.Factory compiled = qe.compile(null, script.getIdOrCode(), TemplateScript.CONTEXT, Collections.emptyMap());
@ -82,13 +147,14 @@ public class MustacheScriptEngineTests extends ESTestCase {
}
public void testParseTemplateAsSingleStringWithConditionalClause() throws IOException {
String templateString = "{"
+ " \"source\" : \"{ \\\"match_{{#use_it}}{{template}}{{/use_it}}\\\":{} }\","
+ " \"params\":{"
+ " \"template\":\"all\","
+ " \"use_it\": true"
+ " }"
+ "}";
String templateString = """
{
"source": "{ \\"match_{{#use_it}}{{template}}{{/use_it}}\\":{} }",
"params": {
"template": "all",
"use_it": true
}
}""";
XContentParser parser = createParser(JsonXContent.jsonXContent, templateString);
Script script = Script.parse(parser);
TemplateScript.Factory compiled = qe.compile(null, script.getIdOrCode(), TemplateScript.CONTEXT, Collections.emptyMap());

View file

@ -41,21 +41,52 @@ public class MustacheTests extends ESTestCase {
private ScriptEngine engine = new MustacheScriptEngine();
public void testBasics() {
    // Compile a boosting-query template with a single {{boost_val}} parameter
    // and verify the rendered output is the template with 0.2 substituted.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicates of `template` and the assertion (diff residue).
    String template = """
        GET _search
        {
          "query": {
            "boosting": {
              "positive": {
                "match": {
                  "body": "gift"
                }
              },
              "negative": {
                "term": {
                  "body": {
                    "value": "solr"
                  }
                }
              },
              "negative_boost": {{boost_val}}
            }
          }
        }""";
    Map<String, Object> params = singletonMap("boost_val", "0.2");
    TemplateScript.Factory factory = engine.compile(null, template, TemplateScript.CONTEXT, Collections.emptyMap());
    TemplateScript result = factory.newInstance(params);
    assertEquals("Mustache templating broken", """
        GET _search
        {
          "query": {
            "boosting": {
              "positive": {
                "match": {
                  "body": "gift"
                }
              },
              "negative": {
                "term": {
                  "body": {
                    "value": "solr"
                  }
                }
              },
              "negative_boost": 0.2
            }
          }
        }""", result.execute());
}
public void testArrayAccess() throws Exception {
@ -151,8 +182,20 @@ public class MustacheTests extends ESTestCase {
Map<String, Object> ctx = singletonMap("ctx", human0);
assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("{\"ctx\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}}"));
assertScript("{{#toJson}}ctx{{/toJson}}", ctx, equalTo("{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}"));
assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo(XContentHelper.stripWhitespace("""
{
"ctx": {
"name": "John Smith",
"age": 42,
"height": 1.84
}
}""")));
assertScript("{{#toJson}}ctx{{/toJson}}", ctx, equalTo(XContentHelper.stripWhitespace("""
{
"name": "John Smith",
"age": 42,
"height": 1.84
}""")));
assertScript("{{#toJson}}ctx.name{{/toJson}}", ctx, equalTo("John Smith"));
}
@ -173,34 +216,49 @@ public class MustacheTests extends ESTestCase {
Map<String, Object> ctx = singletonMap("ctx", humans);
assertScript(
"{{#toJson}}.{{/toJson}}",
ctx,
equalTo(
"{\"ctx\":{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":"
+ "{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}}"
)
);
assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo(XContentHelper.stripWhitespace("""
{
"ctx": {
"first": {
"name": "John Smith",
"age": 42,
"height": 1.84
},
"second": {
"name": "Dave Smith",
"age": 27,
"height": 1.71
}
}
}""")));
assertScript(
"{{#toJson}}ctx{{/toJson}}",
ctx,
equalTo(
"{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":"
+ "{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}"
)
);
assertScript("{{#toJson}}ctx{{/toJson}}", ctx, equalTo(XContentHelper.stripWhitespace("""
{
"first": {
"name": "John Smith",
"age": 42,
"height": 1.84
},
"second": {
"name": "Dave Smith",
"age": 27,
"height": 1.71
}
}""")));
assertScript("{{#toJson}}ctx.first{{/toJson}}", ctx, equalTo("{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}"));
assertScript("{{#toJson}}ctx.first{{/toJson}}", ctx, equalTo("""
{"name":"John Smith","age":42,"height":1.84}"""));
assertScript("{{#toJson}}ctx.second{{/toJson}}", ctx, equalTo("{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}"));
assertScript("{{#toJson}}ctx.second{{/toJson}}", ctx, equalTo("""
{"name":"Dave Smith","age":27,"height":1.71}"""));
}
public void testSimpleArrayToJSON() throws Exception {
String[] array = new String[] { "one", "two", "three" };
Map<String, Object> ctx = singletonMap("array", array);
assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("{\"array\":[\"one\",\"two\",\"three\"]}"));
assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("""
{"array":["one","two","three"]}"""));
assertScript("{{#toJson}}array{{/toJson}}", ctx, equalTo("[\"one\",\"two\",\"three\"]"));
assertScript("{{#toJson}}array.0{{/toJson}}", ctx, equalTo("one"));
assertScript("{{#toJson}}array.1{{/toJson}}", ctx, equalTo("two"));
@ -212,7 +270,8 @@ public class MustacheTests extends ESTestCase {
List<String> list = Arrays.asList("one", "two", "three");
Map<String, Object> ctx = singletonMap("ctx", list);
assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("{\"ctx\":[\"one\",\"two\",\"three\"]}"));
assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("""
{"ctx":["one","two","three"]}"""));
assertScript("{{#toJson}}ctx{{/toJson}}", ctx, equalTo("[\"one\",\"two\",\"three\"]"));
assertScript("{{#toJson}}ctx.0{{/toJson}}", ctx, equalTo("one"));
assertScript("{{#toJson}}ctx.1{{/toJson}}", ctx, equalTo("two"));
@ -255,11 +314,9 @@ public class MustacheTests extends ESTestCase {
XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2()
);
assertScript(
"{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}",
ctx,
equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}")
);
assertScript("{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", ctx, equalTo("""
{"index":"index-1","id":1,"type":"type-1"}\
{"index":"index-2","id":2,"type":"type-2"}"""));
assertScript("{{#ctx.bulks}}<{{#toJson}}id{{/toJson}}>{{/ctx.bulks}}", ctx, equalTo("<1><2>"));
}

View file

@ -35,7 +35,10 @@ public class RestMultiSearchTemplateActionTests extends RestActionTestCase {
}
public void testTypeInPath() {
String content = "{ \"index\": \"some_index\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n";
String content = """
{ "index": "some_index" }
{"source": {"query" : {"match_all" :{}}}}
""";
BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(
@ -47,7 +50,10 @@ public class RestMultiSearchTemplateActionTests extends RestActionTestCase {
}
public void testTypeInBody() {
String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n";
String content = """
{ "index": "some_index", "type": "some_type" }\s
{"source": {"query" : {"match_all" :{}}}}\s
""";
BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(

View file

@ -9,6 +9,7 @@
package org.elasticsearch.script.mustache;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.AbstractXContentTestCase;
import org.elasticsearch.xcontent.ToXContent;
@ -68,7 +69,8 @@ public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase
SearchTemplateRequest request = new SearchTemplateRequest();
request.setScriptType(ScriptType.INLINE);
request.setScript("{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }");
request.setScript("""
{"query": { "match" : { "{{my_field}}" : "{{my_value}}" } } }""");
request.setProfile(true);
Map<String, Object> scriptParams = new HashMap<>();
@ -79,7 +81,8 @@ public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType)
.startObject()
.field("source", "{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }")
.field("source", """
{"query": { "match" : { "{{my_field}}" : "{{my_value}}" } } }""")
.startObject("params")
.field("my_field", "foo")
.field("my_value", "bar")
@ -125,41 +128,51 @@ public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase
}
public void testFromXContentWithEmbeddedTemplate() throws Exception {
    // An inline template may be supplied as an embedded object under "source";
    // parsing re-serializes it to compact JSON with double quotes.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicates of `source` and the script assertion (diff residue).
    String source = """
        {
          'source' : {
            'query': {
              'terms': {
                'status': [
                  '{{#status}}',
                  '{{.}}',
                  '{{/status}}'
                ]
              }
            }
          }
        }""";

    SearchTemplateRequest request = SearchTemplateRequest.fromXContent(newParser(source));
    assertThat(request.getScript(), equalTo("""
        {"query":{"terms":{"status":["{{#status}}","{{.}}","{{/status}}"]}}}"""));
    assertThat(request.getScriptType(), equalTo(ScriptType.INLINE));
    assertThat(request.getScriptParams(), nullValue());
}
public void testFromXContentWithEmbeddedTemplateAndParams() throws Exception {
String source = "{"
+ " 'source' : {"
+ " 'query': { 'match' : { '{{my_field}}' : '{{my_value}}' } },"
+ " 'size' : '{{my_size}}'"
+ " },"
+ " 'params' : {"
+ " 'my_field' : 'foo',"
+ " 'my_value' : 'bar',"
+ " 'my_size' : 5"
+ " }"
+ "}";
String source = """
{
'source' : {
'query': { 'match' : { '{{my_field}}' : '{{my_value}}' } },
'size' : '{{my_size}}'
},
'params' : {
'my_field' : 'foo',
'my_value' : 'bar',
'my_size' : 5
}
}""";
SearchTemplateRequest request = SearchTemplateRequest.fromXContent(newParser(source));
assertThat(request.getScript(), equalTo("{\"query\":{\"match\":{\"{{my_field}}\":\"{{my_value}}\"}},\"size\":\"{{my_size}}\"}"));
assertThat(request.getScript(), equalTo(XContentHelper.stripWhitespace("""
{
"query": {
"match": {
"{{my_field}}": "{{my_value}}"
}
},
"size": "{{my_size}}"
}""")));
assertThat(request.getScriptType(), equalTo(ScriptType.INLINE));
assertThat(request.getScriptParams().size(), equalTo(3));
assertThat(request.getScriptParams(), hasEntry("my_field", "foo"));

View file

@ -97,7 +97,12 @@ public class BasicExpressionTests extends ScriptTestCase {
assertEquals(1, exec("return (int)1.0;"));
assertEquals((byte) 100, exec("double x = 100; return (byte)x;"));
assertEquals(3, exec("Map x = new HashMap();\n" + "Object y = x;\n" + "((Map)y).put(2, 3);\n" + "return x.get(2);\n"));
assertEquals(3, exec("""
Map x = new HashMap();
Object y = x;
((Map)y).put(2, 3);
return x.get(2);
"""));
}
public void testIllegalDefCast() {
@ -112,10 +117,11 @@ public class BasicExpressionTests extends ScriptTestCase {
assertEquals("aaabbb", exec("return \"aaa\" + \"bbb\";"));
assertEquals("aaabbb", exec("String aaa = \"aaa\", bbb = \"bbb\"; return aaa + bbb;"));
assertEquals(
"aaabbbbbbbbb",
exec("String aaa = \"aaa\", bbb = \"bbb\"; int x;\n" + "for (; x < 3; ++x) \n" + " aaa += bbb;\n" + "return aaa;")
);
assertEquals("aaabbbbbbbbb", exec("""
String aaa = "aaa", bbb = "bbb"; int x;
for (; x < 3; ++x)
aaa += bbb;
return aaa;"""));
}
public void testComp() {
@ -228,11 +234,15 @@ public class BasicExpressionTests extends ScriptTestCase {
assertEquals(0, exec("def a = ['other': ['cat': params.t]]; return a.other?.cat?.x", singletonMap("t", t), true));
// Assignments
assertNull(exec("def a = [:];\n" + "a.missing_length = a.missing?.length();\n" + "return a.missing_length", true));
assertEquals(
3,
exec("def a = [:];\n" + "a.missing = 'foo';\n" + "a.missing_length = a.missing?.length();\n" + "return a.missing_length", true)
);
assertNull(exec("""
def a = [:];
a.missing_length = a.missing?.length();
return a.missing_length""", true));
assertEquals(3, exec("""
def a = [:];
a.missing = 'foo';
a.missing_length = a.missing?.length();
return a.missing_length""", true));
// Writes, all unsupported at this point
// assertEquals(null, exec("org.elasticsearch.painless.FeatureTestObject a = null; return a?.x")); // Read field

View file

@ -34,45 +34,42 @@ public class BasicStatementTests extends ScriptTestCase {
assertEquals(2, exec("int x = 4; if (x == 5) return 1; else if (x == 4) return 2; else return 0;"));
assertEquals(1, exec("int x = 4; if (x == 5) return 1; else if (x == 4) return 1; else return 0;"));
assertEquals(
3,
exec(
"int x = 5;\n"
+ "if (x == 5) {\n"
+ " int y = 2;\n"
+ " \n"
+ " if (y == 2) {\n"
+ " x = 3;\n"
+ " }\n"
+ " \n"
+ "}\n"
+ "\n"
+ "return x;\n"
)
);
assertEquals(3, exec("""
int x = 5;
if (x == 5) {
int y = 2;
if (y == 2) {
x = 3;
}
}
return x;
"""));
}
public void testWhileStatement() {
assertEquals("aaaaaa", exec("String c = \"a\"; int x; while (x < 5) { c += \"a\"; ++x; } return c;"));
Object value = exec(
" byte[][] b = new byte[5][5]; \n"
+ " byte x = 0, y; \n"
+ " \n"
+ " while (x < 5) { \n"
+ " y = 0; \n"
+ " \n"
+ " while (y < 5) { \n"
+ " b[x][y] = (byte)(x*y); \n"
+ " ++y; \n"
+ " } \n"
+ " \n"
+ " ++x; \n"
+ " } \n"
+ " \n"
+ " return b; \n"
);
Object value = exec("""
byte[][] b = new byte[5][5];
byte x = 0, y;
while (x < 5) {
y = 0;
while (y < 5) {
b[x][y] = (byte)(x*y);
++y;
}
++x;
}
return b;
""");
byte[][] b = (byte[][]) value;
@ -86,23 +83,23 @@ public class BasicStatementTests extends ScriptTestCase {
public void testDoWhileStatement() {
assertEquals("aaaaaa", exec("String c = \"a\"; int x; do { c += \"a\"; ++x; } while (x < 5); return c;"));
Object value = exec(
" int[][] b = new int[5][5]; \n"
+ " int x = 0, y; \n"
+ " \n"
+ " do { \n"
+ " y = 0; \n"
+ " \n"
+ " do { \n"
+ " b[x][y] = x*y; \n"
+ " ++y; \n"
+ " } while (y < 5); \n"
+ " \n"
+ " ++x; \n"
+ " } while (x < 5); \n"
+ " \n"
+ " return b; \n"
);
Object value = exec("""
int[][] b = new int[5][5];
int x = 0, y;
do {
y = 0;
do {
b[x][y] = x*y;
++y;
} while (y < 5);
++x;
} while (x < 5);
return b;
""");
int[][] b = (int[][]) value;
@ -119,16 +116,16 @@ public class BasicStatementTests extends ScriptTestCase {
assertEquals(6, exec("double test() { return 0.0; }" + "int x, y; for (test(); x < 4; test()) {y += x; ++x;} return y;"));
Object value = exec(
" int[][] b = new int[5][5]; \n"
+ " for (int x = 0; x < 5; ++x) { \n"
+ " for (int y = 0; y < 5; ++y) { \n"
+ " b[x][y] = x*y; \n"
+ " } \n"
+ " } \n"
+ " \n"
+ " return b; \n"
);
Object value = exec("""
int[][] b = new int[5][5];
for (int x = 0; x < 5; ++x) {
for (int y = 0; y < 5; ++y) {
b[x][y] = x*y;
}
}
return b;
""");
int[][] b = (int[][]) value;

View file

@ -14,126 +14,85 @@ import java.time.ZonedDateTime;
public class DateTimeTests extends ScriptTestCase {
public void testLongToZonedDateTime() {
assertEquals(
ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")),
exec(
"long milliSinceEpoch = 434931330000L;"
+ "Instant instant = Instant.ofEpochMilli(milliSinceEpoch);"
+ "return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));"
)
);
assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec("""
long milliSinceEpoch = 434931330000L;
Instant instant = Instant.ofEpochMilli(milliSinceEpoch);
return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));"""));
}
public void testStringToZonedDateTime() {
    // Parse a ZonedDateTime in painless from: an epoch-millis string, an
    // ISO-8601 string, an RFC-1123 string, and a custom formatter pattern.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicates of each assertion (diff residue).
    assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec("""
        String milliSinceEpochString = '434931330000';
        long milliSinceEpoch = Long.parseLong(milliSinceEpochString);
        Instant instant = Instant.ofEpochMilli(milliSinceEpoch);
        return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));"""));

    assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec("""
        String datetime = '1983-10-13T22:15:30Z';
        return ZonedDateTime.parse(datetime);"""));

    assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec("""
        String datetime = 'Thu, 13 Oct 1983 22:15:30 GMT';
        return ZonedDateTime.parse(datetime, DateTimeFormatter.RFC_1123_DATE_TIME);"""));

    assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec("""
        String datetime = 'custom y 1983 m 10 d 13 22:15:30 Z';
        DateTimeFormatter dtf = DateTimeFormatter.ofPattern(
            "'custom' 'y' yyyy 'm' MM 'd' dd HH:mm:ss VV");
        return ZonedDateTime.parse(datetime, dtf);"""));
}
public void testPiecesToZonedDateTime() {
    // Build a ZonedDateTime in painless from individual date/time components.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of this assertion (diff residue).
    assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of("Z")), exec("""
        int year = 1983;
        int month = 10;
        int day = 13;
        int hour = 22;
        int minutes = 15;
        int seconds = 30;
        int nanos = 0;
        String tz = 'Z';
        return ZonedDateTime.of(year, month, day, hour, minutes, seconds, nanos, ZoneId.of(tz));"""));
}
public void testZonedDatetimeToLong() {
    // Convert a ZonedDateTime back to epoch millis in painless.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of this assertion (diff residue).
    assertEquals(434931330000L, exec("""
        ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
        return zdt.toInstant().toEpochMilli();"""));
}
public void testZonedDateTimeToString() {
    // Format a ZonedDateTime in painless: ISO instant and a custom pattern.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicates of both assertions (diff residue).
    assertEquals("1983-10-13T22:15:30Z", exec("""
        ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
        return zdt.format(DateTimeFormatter.ISO_INSTANT);"""));

    assertEquals("date: 1983/10/13 time: 22:15:30", exec("""
        ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
        DateTimeFormatter dtf = DateTimeFormatter.ofPattern(
            "'date:' yyyy/MM/dd 'time:' HH:mm:ss");
        return zdt.format(dtf);"""));
}
public void testZonedDateTimeToPieces() {
    // Decompose a ZonedDateTime into its components via painless field access.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of this assertion (diff residue).
    assertArrayEquals(new int[] { 1983, 10, 13, 22, 15, 30, 100 }, (int[]) exec("""
        int[] pieces = new int[7];
        ZonedDateTime zdt = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 100, ZoneId.of('Z'));
        pieces[0] = zdt.year;
        pieces[1] = zdt.monthValue;
        pieces[2] = zdt.dayOfMonth;
        pieces[3] = zdt.hour;
        pieces[4] = zdt.minute;
        pieces[5] = zdt.second;
        pieces[6] = zdt.nano;
        return pieces;"""));
}
public void testLongManipulation() {
    // Arithmetic on epoch millis (subtract 3 seconds) before conversion.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of this assertion (diff residue).
    assertEquals(ZonedDateTime.of(1983, 10, 13, 22, 15, 27, 0, ZoneId.of("Z")), exec("""
        long milliSinceEpoch = 434931330000L;
        milliSinceEpoch = milliSinceEpoch - 1000L*3L;
        Instant instant = Instant.ofEpochMilli(milliSinceEpoch);
        return ZonedDateTime.ofInstant(instant, ZoneId.of('Z'))"""));
}
public void testZonedDateTimeManipulation() {
@ -161,23 +120,15 @@ public class DateTimeTests extends ScriptTestCase {
}
public void testZonedDateTimeDifference() {
    // ChronoUnit.between in painless: millisecond and whole-day differences.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicates of both assertions (diff residue).
    assertEquals(4989L, exec("""
        ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));
        ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 13, 22, 15, 35, 0, ZoneId.of('Z'));
        return ChronoUnit.MILLIS.between(zdt1, zdt2);"""));

    assertEquals(4L, exec("""
        ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));
        ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));
        return ChronoUnit.DAYS.between(zdt1, zdt2);"""));
}
public void compareLongs() {
@ -185,44 +136,28 @@ public class DateTimeTests extends ScriptTestCase {
}
public void compareZonedDateTimes() {
    // isBefore/isAfter comparisons between two painless ZonedDateTimes.
    // NOTE(review): the name lacks the "test" prefix, so this looks like it is
    // never picked up by the JUnit runner — confirm whether that is intended.
    // Also removed leftover pre-refactor concatenated-string duplicates of
    // both assertions (diff residue).
    assertEquals(true, exec("""
        ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
        ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));
        return zdt1.isBefore(zdt2);"""));

    assertEquals(false, exec("""
        ZonedDateTime zdt1 = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
        ZonedDateTime zdt2 = ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));
        return zdt1.isAfter(zdt2);"""));
}
public void testTimeZone() {
    // Zone conversion: UTC -> America/Los_Angeles, and round-tripping an
    // RFC-1123 string through a zone change.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicates of both assertions (diff residue).
    assertEquals(ZonedDateTime.of(1983, 10, 13, 15, 15, 30, 0, ZoneId.of("America/Los_Angeles")), exec("""
        ZonedDateTime utc = ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
        return utc.withZoneSameInstant(ZoneId.of('America/Los_Angeles'));"""));

    assertEquals("Thu, 13 Oct 1983 15:15:30 -0700", exec("""
        String gmtString = 'Thu, 13 Oct 1983 22:15:30 GMT';
        ZonedDateTime gmtZdt = ZonedDateTime.parse(gmtString,
            DateTimeFormatter.RFC_1123_DATE_TIME);
        ZonedDateTime pstZdt =
            gmtZdt.withZoneSameInstant(ZoneId.of('America/Los_Angeles'));
        return pstZdt.format(DateTimeFormatter.RFC_1123_DATE_TIME);"""));
}
}

View file

@ -47,14 +47,12 @@ public class DebugTests extends ScriptTestCase {
assertThat(e.getHeaders(painlessLookup), not(hasKey("es.painless_class")));
// You can't catch the explain exception
e = expectScriptThrows(
PainlessExplainError.class,
() -> exec(
"try {\n" + " Debug.explain(params.a)\n" + "} catch (Exception e) {\n" + " return 1\n" + "}",
singletonMap("a", dummy),
true
)
);
e = expectScriptThrows(PainlessExplainError.class, () -> exec("""
try {
Debug.explain(params.a)
} catch (Exception e) {
return 1
}""", singletonMap("a", dummy), true));
assertSame(dummy, e.getObjectToExplain());
}

View file

@ -23,7 +23,10 @@ public class JsonTests extends ScriptTestCase {
// pretty print
output = exec("Json.dump(params.data, true)", singletonMap("data", singletonMap("hello", "world")), true);
assertEquals("{\n \"hello\" : \"world\"\n}", output);
assertEquals("""
{
"hello" : "world"
}""", output);
}
public void testLoad() {

View file

@ -18,15 +18,16 @@ public class UserFunctionTests extends ScriptTestCase {
}
public void testUserFunctionDefCallRef() {
String source = "String getSource() { 'source'; }\n"
+ "int myCompare(int a, int b) { getMulti() * Integer.compare(a, b) }\n"
+ "int getMulti() { return -1 }\n"
+ "def l = [1, 100, -100];\n"
+ "if (myCompare(10, 50) > 0) { l.add(50 + getMulti()) }\n"
+ "l.sort(this::myCompare);\n"
+ "if (l[0] == 100) { l.remove(l.size() - 1) ; l.sort((a, b) -> -1 * myCompare(a, b)) } \n"
+ "if (getSource().startsWith('sour')) { l.add(255); }\n"
+ "return l;";
String source = """
String getSource() { 'source'; }
int myCompare(int a, int b) { getMulti() * Integer.compare(a, b) }
int getMulti() { return -1 }
def l = [1, 100, -100];
if (myCompare(10, 50) > 0) { l.add(50 + getMulti()) }
l.sort(this::myCompare);
if (l[0] == 100) { l.remove(l.size() - 1) ; l.sort((a, b) -> -1 * myCompare(a, b)) }\s
if (getSource().startsWith('sour')) { l.add(255); }
return l;""";
assertEquals(List.of(1, 49, 100, 255), exec(source));
assertBytecodeExists(source, "public &getSource()Ljava/lang/String");
assertBytecodeExists(source, "public &getMulti()I");
@ -36,62 +37,71 @@ public class UserFunctionTests extends ScriptTestCase {
}
public void testChainedUserMethods() {
    // A user function (myCompare) that calls another user function (getMulti),
    // passed as a method reference to List.sort on a typed List.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int a, int b) { getMulti() * (a - b) }
        int getMulti() { -1 }
        List l = [1, 100, -100];
        l.sort(this::myCompare);
        l;
        """;
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testChainedUserMethodsLambda() {
    // Same chained user-function call, but invoked from a lambda comparator.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int a, int b) { getMulti() * (a - b) }
        int getMulti() { -1 }
        List l = [1, 100, -100];
        l.sort((a, b) -> myCompare(a, b));
        l;
        """;
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testChainedUserMethodsDef() {
    // Chained user functions with the receiver typed as `def` (dynamic).
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int a, int b) { getMulti() * (a - b) }
        int getMulti() { -1 }
        def l = [1, 100, -100];
        l.sort(this::myCompare);
        l;
        """;
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testChainedUserMethodsLambdaDef() {
    // Chained user functions via a lambda, with a `def` (dynamic) receiver.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int a, int b) { getMulti() * (a - b) }
        int getMulti() { -1 }
        def l = [1, 100, -100];
        l.sort((a, b) -> myCompare(a, b));
        l;
        """;
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testChainedUserMethodsLambdaCaptureDef() {
    // The lambda captures locals (cx, cm) and forwards them to the chained
    // user functions; receiver is `def`.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int a, int b, int x, int m) { getMulti(m) * (a - b + x) }
        int getMulti(int m) { -1 * m }
        def l = [1, 100, -100];
        int cx = 100;
        int cm = 1;
        l.sort((a, b) -> myCompare(a, b, cx, cm));
        l;
        """;
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testMethodReferenceInUserFunction() {
    // A user function that itself passes a method reference (this::getLength)
    // to Map.computeIfAbsent, invoked from a capturing lambda comparator.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int a, int b, String s) { Map m = ['f': 5]; a - b + m.computeIfAbsent(s, this::getLength) }
        int getLength(String s) { s.length() }
        def l = [1, 0, -2];
        String s = 'g';
        l.sort((a, b) -> myCompare(a, b, s));
        l;
        """;
    assertEquals(List.of(-2, 1, 0), exec(source, Map.of("a", 1), false));
}
@ -102,44 +112,54 @@ public class UserFunctionTests extends ScriptTestCase {
}
public void testUserFunctionRef() {
    // A user function used as a method-reference comparator; the generated
    // bytecode contains the mangled &myCompare method.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int x, int y) { return -1 * x - y }
        List l = [1, 100, -100];
        l.sort(this::myCompare);
        return l;""";
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    assertBytecodeExists(source, "public &myCompare(II)I");
}
public void testUserFunctionRefEmpty() {
    // Sorting an empty list still compiles the user function and the lambda
    // call site, even though the comparator is never invoked.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int x, int y) { return -1 * x - y }
        [].sort((a, b) -> myCompare(a, b));
        """;
    assertNull(exec(source, Map.of("a", 1), false));
    assertBytecodeExists(source, "public &myCompare(II)I");
    assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
}
public void testUserFunctionCallInLambda() {
    // A lambda body that calls a user function; verifies both the result and
    // the INVOKEVIRTUAL call site in the generated bytecode.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(int x, int y) { -1 * ( x - y ) }
        List l = [1, 100, -100];
        l.sort((a, b) -> myCompare(a, b));
        return l;""";
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    assertBytecodeExists(source, "public &myCompare(II)I");
    assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
}
public void testUserFunctionLambdaCapture() {
    // The lambda captures a local Object (q) and forwards it to the user
    // function, which branches on its nullness.
    // NOTE(review): removed leftover pre-refactor concatenated-string
    // duplicate of `source` (diff residue).
    String source = """
        int myCompare(Object o, int x, int y) { return o != null ? -1 * ( x - y ) : ( x - y ) }
        List l = [1, 100, -100];
        Object q = '';
        l.sort((a, b) -> myCompare(q, a, b));
        return l;""";
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    assertBytecodeExists(source, "public &myCompare(Ljava/lang/Object;II)I");
    assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (Ljava/lang/Object;II)I");
}
public void testLambdaCapture() {
    // Painless script as a text block: a plain lambda capturing local `q` (no user
    // function involved), which compiles to a synthetic static lambda method.
    // (Removed the duplicate pre-refactor concatenated declaration of `source`.)
    String source = """
        List l = [1, 100, -100];
        int q = -1;
        l.sort((a, b) -> q * ( a - b ));
        return l;""";
    assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    // Captured int `q` becomes the leading I parameter of the synthetic lambda.
    assertBytecodeExists(source, "public static synthetic lambda$synthetic$0(ILjava/lang/Object;Ljava/lang/Object;)I");
}

View file

@ -102,11 +102,8 @@ public class PainlessExecuteApiTests extends ESSingleNodeTestCase {
ScriptService scriptService = getInstanceFromNode(ScriptService.class);
IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "rank", "type=long", "text", "type=text");
Request.ContextSetup contextSetup = new Request.ContextSetup(
"index",
new BytesArray("{\"rank\": 4.0, \"text\": \"quick brown fox\"}"),
new MatchQueryBuilder("text", "fox")
);
Request.ContextSetup contextSetup = new Request.ContextSetup("index", new BytesArray("""
{"rank": 4.0, "text": "quick brown fox"}"""), new MatchQueryBuilder("text", "fox"));
contextSetup.setXContentType(XContentType.JSON);
Request request = new Request(
new Script(
@ -126,11 +123,8 @@ public class PainlessExecuteApiTests extends ESSingleNodeTestCase {
ScriptService scriptService = getInstanceFromNode(ScriptService.class);
IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "rank", "type=long", "text", "type=text");
Request.ContextSetup contextSetup = new Request.ContextSetup(
"index",
new BytesArray("{\"rank\": 4.0, \"text\": \"quick brown fox\"}"),
new MatchQueryBuilder("text", "fox")
);
Request.ContextSetup contextSetup = new Request.ContextSetup("index", new BytesArray("""
{"rank": 4.0, "text": "quick brown fox"}"""), new MatchQueryBuilder("text", "fox"));
contextSetup.setXContentType(XContentType.JSON);
Request request = new Request(
new Script(ScriptType.INLINE, "painless", "emit(doc['rank'].value < params.max_rank)", singletonMap("max_rank", 5.0)),
@ -171,18 +165,10 @@ public class PainlessExecuteApiTests extends ESSingleNodeTestCase {
contextSetup = new Request.ContextSetup("index", new BytesArray("{}"), new MatchAllQueryBuilder());
contextSetup.setXContentType(XContentType.JSON);
request = new Request(
new Script(
ScriptType.INLINE,
"painless",
"emit(ZonedDateTime.parse(\"2021-01-01T00:00:00Z\").toInstant().toEpochMilli());\n"
+ "emit(ZonedDateTime.parse(\"1942-05-31T15:16:17Z\").toInstant().toEpochMilli());\n"
+ "emit(ZonedDateTime.parse(\"2035-10-13T10:54:19Z\").toInstant().toEpochMilli());",
emptyMap()
),
"date_field",
contextSetup
);
request = new Request(new Script(ScriptType.INLINE, "painless", """
emit(ZonedDateTime.parse("2021-01-01T00:00:00Z").toInstant().toEpochMilli());
emit(ZonedDateTime.parse("1942-05-31T15:16:17Z").toInstant().toEpochMilli());
emit(ZonedDateTime.parse("2035-10-13T10:54:19Z").toInstant().toEpochMilli());""", emptyMap()), "date_field", contextSetup);
response = innerShardOperation(request, scriptService, indexService);
assertEquals(
Arrays.asList("2021-01-01T00:00:00.000Z", "1942-05-31T15:16:17.000Z", "2035-10-13T10:54:19.000Z"),
@ -348,11 +334,8 @@ public class PainlessExecuteApiTests extends ESSingleNodeTestCase {
ScriptService scriptService = getInstanceFromNode(ScriptService.class);
IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "rank", "type=long", "text", "type=keyword");
Request.ContextSetup contextSetup = new Request.ContextSetup(
"index",
new BytesArray("{\"rank\": 4.0, \"text\": \"quick brown fox\"}"),
new MatchQueryBuilder("text", "fox")
);
Request.ContextSetup contextSetup = new Request.ContextSetup("index", new BytesArray("""
{"rank": 4.0, "text": "quick brown fox"}"""), new MatchQueryBuilder("text", "fox"));
contextSetup.setXContentType(XContentType.JSON);
contextSetup.setXContentType(XContentType.JSON);
Request request = new Request(

View file

@ -738,39 +738,40 @@ public class ShapeBuilderTests extends ESTestCase {
}
public void testPolygon3D() {
String expected = "{\n"
+ " \"type\" : \"polygon\",\n"
+ " \"orientation\" : \"right\",\n"
+ " \"coordinates\" : [\n"
+ " [\n"
+ " [\n"
+ " -45.0,\n"
+ " 30.0,\n"
+ " 100.0\n"
+ " ],\n"
+ " [\n"
+ " 45.0,\n"
+ " 30.0,\n"
+ " 75.0\n"
+ " ],\n"
+ " [\n"
+ " 45.0,\n"
+ " -30.0,\n"
+ " 77.0\n"
+ " ],\n"
+ " [\n"
+ " -45.0,\n"
+ " -30.0,\n"
+ " 101.0\n"
+ " ],\n"
+ " [\n"
+ " -45.0,\n"
+ " 30.0,\n"
+ " 110.0\n"
+ " ]\n"
+ " ]\n"
+ " ]\n"
+ "}";
String expected = """
{
"type" : "polygon",
"orientation" : "right",
"coordinates" : [
[
[
-45.0,
30.0,
100.0
],
[
45.0,
30.0,
75.0
],
[
45.0,
-30.0,
77.0
],
[
-45.0,
-30.0,
101.0
],
[
-45.0,
30.0,
110.0
]
]
]
}""";
PolygonBuilder pb = new PolygonBuilder(
new CoordinatesBuilder().coordinate(new Coordinate(-45, 30, 100))

View file

@ -101,13 +101,23 @@ public class RankFeatureQueryBuilderTests extends AbstractQueryTestCase<RankFeat
}
public void testDefaultScoreFunction() throws IOException {
    // rank_feature query with no explicit score function; JSON as a text block.
    // (Removed the duplicate pre-refactor concatenated declaration of `query`.)
    String query = """
        {
          "rank_feature" : {
            "field": "my_feature_field"
          }
        }""";
    Query parsedQuery = parseQuery(query).toQuery(createSearchExecutionContext());
    // When no function is specified, the builder defaults to saturation scoring.
    assertEquals(FeatureField.newSaturationQuery("_feature", "my_feature_field"), parsedQuery);
}
public void testIllegalField() {
String query = "{\n" + " \"rank_feature\" : {\n" + " \"field\": \"" + TEXT_FIELD_NAME + "\"\n" + " }\n" + "}";
String query = """
{
"rank_feature" : {
"field": "%s"
}
}""".formatted(TEXT_FIELD_NAME);
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> parseQuery(query).toQuery(createSearchExecutionContext())
@ -119,14 +129,15 @@ public class RankFeatureQueryBuilderTests extends AbstractQueryTestCase<RankFeat
}
public void testIllegalCombination() {
String query = "{\n"
+ " \"rank_feature\" : {\n"
+ " \"field\": \"my_negative_feature_field\",\n"
+ " \"log\" : {\n"
+ " \"scaling_factor\": 4.5\n"
+ " }\n"
+ " }\n"
+ "}";
String query = """
{
"rank_feature" : {
"field": "my_negative_feature_field",
"log" : {
"scaling_factor": 4.5
}
}
}""";
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> parseQuery(query).toQuery(createSearchExecutionContext())

View file

@ -208,41 +208,42 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
}
public void testFromJson() throws IOException {
String query = "{\n"
+ " \"has_child\" : {\n"
+ " \"query\" : {\n"
+ " \"range\" : {\n"
+ " \"mapped_string\" : {\n"
+ " \"gte\" : \"agJhRET\",\n"
+ " \"lte\" : \"zvqIq\",\n"
+ " \"boost\" : 1.0\n"
+ " }\n"
+ " }\n"
+ " },\n"
+ " \"type\" : \"child\",\n"
+ " \"score_mode\" : \"avg\",\n"
+ " \"min_children\" : 883170873,\n"
+ " \"max_children\" : 1217235442,\n"
+ " \"ignore_unmapped\" : false,\n"
+ " \"boost\" : 2.0,\n"
+ " \"_name\" : \"WNzYMJKRwePuRBh\",\n"
+ " \"inner_hits\" : {\n"
+ " \"name\" : \"inner_hits_name\",\n"
+ " \"ignore_unmapped\" : false,\n"
+ " \"from\" : 0,\n"
+ " \"size\" : 100,\n"
+ " \"version\" : false,\n"
+ " \"seq_no_primary_term\" : false,\n"
+ " \"explain\" : false,\n"
+ " \"track_scores\" : false,\n"
+ " \"sort\" : [ {\n"
+ " \"mapped_string\" : {\n"
+ " \"order\" : \"asc\"\n"
+ " }\n"
+ " } ]\n"
+ " }\n"
+ " }\n"
+ "}";
String query = """
{
"has_child" : {
"query" : {
"range" : {
"mapped_string" : {
"gte" : "agJhRET",
"lte" : "zvqIq",
"boost" : 1.0
}
}
},
"type" : "child",
"score_mode" : "avg",
"min_children" : 883170873,
"max_children" : 1217235442,
"ignore_unmapped" : false,
"boost" : 2.0,
"_name" : "WNzYMJKRwePuRBh",
"inner_hits" : {
"name" : "inner_hits_name",
"ignore_unmapped" : false,
"from" : 0,
"size" : 100,
"version" : false,
"seq_no_primary_term" : false,
"explain" : false,
"track_scores" : false,
"sort" : [ {
"mapped_string" : {
"order" : "asc"
}
} ]
}
}
}""";
HasChildQueryBuilder queryBuilder = (HasChildQueryBuilder) parseQuery(query);
checkGeneratedJson(query, queryBuilder);
assertEquals(query, queryBuilder.maxChildren(), 1217235442);

View file

@ -187,22 +187,23 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
}
public void testFromJson() throws IOException {
String json = "{\n"
+ " \"has_parent\" : {\n"
+ " \"query\" : {\n"
+ " \"term\" : {\n"
+ " \"tag\" : {\n"
+ " \"value\" : \"something\",\n"
+ " \"boost\" : 1.0\n"
+ " }\n"
+ " }\n"
+ " },\n"
+ " \"parent_type\" : \"blog\",\n"
+ " \"score\" : true,\n"
+ " \"ignore_unmapped\" : false,\n"
+ " \"boost\" : 1.0\n"
+ " }\n"
+ "}";
String json = """
{
"has_parent" : {
"query" : {
"term" : {
"tag" : {
"value" : "something",
"boost" : 1.0
}
}
},
"parent_type" : "blog",
"score" : true,
"ignore_unmapped" : false,
"boost" : 1.0
}
}""";
HasParentQueryBuilder parsed = (HasParentQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertEquals(json, "blog", parsed.type());

View file

@ -108,15 +108,15 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
}
public void testFromJson() throws IOException {
String query = "{\n"
+ " \"parent_id\" : {\n"
+ " \"type\" : \"child\",\n"
+ " \"id\" : \"123\",\n"
+ " \"ignore_unmapped\" : false,\n"
+ " \"boost\" : 3.0,\n"
+ " \"_name\" : \"name\""
+ " }\n"
+ "}";
String query = """
{
"parent_id" : {
"type" : "child",
"id" : "123",
"ignore_unmapped" : false,
"boost" : 3.0,
"_name" : "name" }
}""";
ParentIdQueryBuilder queryBuilder = (ParentIdQueryBuilder) parseQuery(query);
checkGeneratedJson(query, queryBuilder);
assertThat(queryBuilder.getType(), Matchers.equalTo("child"));

View file

@ -253,7 +253,9 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
public void testFromJsonNoDocumentType() throws IOException {
    SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
    // percolate query without the deprecated document_type; JSON built via a text block
    // with String.formatted. (Removed the duplicate pre-refactor concatenated
    // declaration of `queryBuilder` left fused with the text-block version.)
    QueryBuilder queryBuilder = parseQuery("""
        {"percolate" : { "document": {}, "field":"%s"}}
        """.formatted(queryField));
    // Must parse and convert to a Lucene query without throwing.
    queryBuilder.toQuery(searchExecutionContext);
}
@ -264,23 +266,16 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
documentSource = Collections.singletonList(randomSource(new HashSet<>()));
SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
QueryBuilder queryBuilder = parseQuery(
"{\"percolate\" : { \"index\": \""
+ indexedDocumentIndex
+ "\", \"id\": \""
+ indexedDocumentId
+ "\", \"field\":\""
+ queryField
+ "\"}}"
);
QueryBuilder queryBuilder = parseQuery("""
{"percolate" : { "index": "%s", "id": "%s", "field":"%s"}}
""".formatted(indexedDocumentIndex, indexedDocumentId, queryField));
rewriteAndFetch(queryBuilder, searchExecutionContext).toQuery(searchExecutionContext);
}
public void testBothDocumentAndDocumentsSpecified() {
    // Supplying both "document" and "documents" is rejected at parse time.
    // (Removed the duplicate pre-refactor expectThrows declaration of `e` left
    // fused with the text-block version.)
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery("""
        {"percolate" : { "document": {}, "documents": [{}, {}], "field":"%s"}}
        """.formatted(queryField)));
    assertThat(e.getMessage(), containsString("The following fields are not allowed together: [document, documents]"));
}
@ -388,11 +383,9 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
public void testFromJsonWithDocumentType() throws IOException {
SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
String queryAsString = "{\"percolate\" : { \"document\": {}, \"document_type\":\""
+ docType
+ "\", \"field\":\""
+ queryField
+ "\"}}";
String queryAsString = """
{"percolate" : { "document": {}, "document_type":"%s", "field":"%s"}}
""".formatted(docType, queryField);
XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, queryAsString, RestApiVersion.V_7);
QueryBuilder queryBuilder = parseQuery(parser);
queryBuilder.toQuery(searchExecutionContext);
@ -406,13 +399,9 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
documentSource = Collections.singletonList(randomSource(new HashSet<>()));
SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
String queryAsString = "{\"percolate\" : { \"index\": \""
+ indexedDocumentIndex
+ "\", \"type\": \"_doc\", \"id\": \""
+ indexedDocumentId
+ "\", \"field\":\""
+ queryField
+ "\"}}";
String queryAsString = """
{"percolate" : { "index": "%s", "type": "_doc", "id": "%s", "field":"%s"}}
""".formatted(indexedDocumentIndex, indexedDocumentId, queryField);
XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, queryAsString, RestApiVersion.V_7);
QueryBuilder queryBuilder = parseQuery(parser);
rewriteAndFetch(queryBuilder, searchExecutionContext).toQuery(searchExecutionContext);

View file

@ -288,20 +288,13 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
assertEquals(expectedNdcg, detail.getNDCG(), 0.0);
assertEquals(unratedDocs, detail.getUnratedDocs());
if (idcg != 0) {
assertEquals(
"{\"dcg\":{\"dcg\":"
+ dcg
+ ",\"ideal_dcg\":"
+ idcg
+ ",\"normalized_dcg\":"
+ expectedNdcg
+ ",\"unrated_docs\":"
+ unratedDocs
+ "}}",
Strings.toString(detail)
);
assertEquals("""
{"dcg":{"dcg":%s,"ideal_dcg":%s,"normalized_dcg":%s,"unrated_docs":%s}}\
""".formatted(dcg, idcg, expectedNdcg, unratedDocs), Strings.toString(detail));
} else {
assertEquals("{\"dcg\":{\"dcg\":" + dcg + ",\"unrated_docs\":" + unratedDocs + "}}", Strings.toString(detail));
assertEquals("""
{"dcg":{"dcg":%s,"unrated_docs":%s}}\
""".formatted(dcg, unratedDocs), Strings.toString(detail));
}
}

Some files were not shown because too many files have changed in this diff Show more