Use Strings.format instead of String.format(Locale.ROOT, ...) in tests (#92106)
Use the locale-independent `Strings.format` method instead of `String.format(Locale.ROOT, ...)`. Inline `ESTestCase.forbidden` calls with `Strings.format` for the sake of consistency. Add a `Strings.format` alias in `common.Strings`.
parent 677766dd1f
commit 2bc7398754

340 changed files with 1343 additions and 1167 deletions
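For context, the helper being adopted here is a thin, locale-independent wrapper around `String.format`. The diff only shows the migrated call sites, so the following is a minimal sketch of what such a delegating alias might look like, assuming it simply pins the locale to `Locale.ROOT`; the actual `org.elasticsearch.core.Strings` implementation may add extra error handling:

    import java.util.Locale;

    public final class Strings {

        private Strings() {}

        // Locale-independent format(): always formats with Locale.ROOT, so the
        // output (decimal separators, digit shapes, etc.) does not change with
        // the JVM's default locale. Sketch only; the real helper in
        // org.elasticsearch.core.Strings may differ.
        public static String format(String format, Object... args) {
            return String.format(Locale.ROOT, format, args);
        }
    }

With such an alias in place, a call like `String.format(Locale.ROOT, "%d-%d", lower, upper)` collapses to `Strings.format("%d-%d", lower, upper)`, which is the mechanical rewrite applied at every call site below.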
@@ -43,6 +43,7 @@ import org.elasticsearch.common.io.FileSystemUtils;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.PathUtils;
 import org.elasticsearch.core.PathUtilsForTesting;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.env.Environment;
@@ -505,7 +506,7 @@ public class InstallPluginActionTests extends ESTestCase {
         final Path removing = env.v2().pluginsFile().resolve(".removing-failed");
         Files.createDirectory(removing);
         final IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip));
-        final String expected = formatted(
+        final String expected = Strings.format(
             "found file [%s] from a failed attempt to remove the plugin [failed]; execute [elasticsearch-plugin remove failed]",
             removing
         );

@@ -8,6 +8,7 @@
 
 package org.elasticsearch.server.cli;
 
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.ESTestCase.WithoutSecurityManager;
 
@@ -291,7 +292,7 @@ public class JvmOptionsParserTests extends ESTestCase {
 
         final int javaMajorVersion = randomIntBetween(8, Integer.MAX_VALUE);
         final int smallerJavaMajorVersion = randomIntBetween(7, javaMajorVersion - 1);
-        final String invalidRangeLine = String.format(Locale.ROOT, "%d:%d-XX:+UseG1GC", javaMajorVersion, smallerJavaMajorVersion);
+        final String invalidRangeLine = Strings.format("%d:%d-XX:+UseG1GC", javaMajorVersion, smallerJavaMajorVersion);
         try (StringReader sr = new StringReader(invalidRangeLine); BufferedReader br = new BufferedReader(sr)) {
             assertInvalidLines(br, Collections.singletonMap(1, invalidRangeLine));
         }
@@ -306,8 +307,8 @@ public class JvmOptionsParserTests extends ESTestCase {
         );
         try (StringReader sr = new StringReader(numberFormatExceptionsLine); BufferedReader br = new BufferedReader(sr)) {
             final Map<Integer, String> invalidLines = new HashMap<>(2);
-            invalidLines.put(1, formatted("%d:-XX:+UseG1GC", invalidLowerJavaMajorVersion));
-            invalidLines.put(2, formatted("8-%d:-XX:+AggressiveOpts", invalidUpperJavaMajorVersion));
+            invalidLines.put(1, Strings.format("%d:-XX:+UseG1GC", invalidLowerJavaMajorVersion));
+            invalidLines.put(2, Strings.format("8-%d:-XX:+AggressiveOpts", invalidUpperJavaMajorVersion));
             assertInvalidLines(br, invalidLines);
         }
 
@@ -321,7 +322,7 @@ public class JvmOptionsParserTests extends ESTestCase {
 
         final int lowerBound = randomIntBetween(9, 16);
         final int upperBound = randomIntBetween(8, lowerBound - 1);
-        final String upperBoundGreaterThanLowerBound = String.format(Locale.ROOT, "%d-%d-XX:+UseG1GC", lowerBound, upperBound);
+        final String upperBoundGreaterThanLowerBound = Strings.format("%d-%d-XX:+UseG1GC", lowerBound, upperBound);
         try (StringReader sr = new StringReader(upperBoundGreaterThanLowerBound); BufferedReader br = new BufferedReader(sr)) {
             assertInvalidLines(br, Collections.singletonMap(1, upperBoundGreaterThanLowerBound));
         }

@@ -11,6 +11,7 @@ package org.elasticsearch.windows.service;
 import org.elasticsearch.Version;
 import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
+import org.elasticsearch.core.Strings;
 import org.junit.Before;
 
 import java.io.IOException;
@@ -153,13 +154,13 @@ public class WindowsServiceInstallCommandTests extends WindowsServiceCliTestCase
     }
 
     public void testDisplayName() throws Exception {
-        assertServiceArgs(Map.of("DisplayName", formatted("\"Elasticsearch %s (elasticsearch-service-x64)\"", Version.CURRENT)));
+        assertServiceArgs(Map.of("DisplayName", Strings.format("\"Elasticsearch %s (elasticsearch-service-x64)\"", Version.CURRENT)));
         envVars.put("SERVICE_DISPLAY_NAME", "my service name");
         assertServiceArgs(Map.of("DisplayName", "\"my service name\""));
     }
 
     public void testDescription() throws Exception {
-        String defaultDescription = formatted("\"Elasticsearch %s Windows Service - https://elastic.co\"", Version.CURRENT);
+        String defaultDescription = Strings.format("\"Elasticsearch %s Windows Service - https://elastic.co\"", Version.CURRENT);
         assertServiceArgs(Map.of("Description", defaultDescription));
         envVars.put("SERVICE_DESCRIPTION", "my description");
         assertServiceArgs(Map.of("Description", "\"my description\""));

@@ -21,6 +21,7 @@ import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.util.Maps;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.test.rest.yaml.ClientYamlDocsTestClient;
 import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
@@ -43,7 +44,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 
 import static java.util.Collections.emptyMap;
@@ -316,7 +316,7 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
         Object previousSecond = null;
         while (firstTokens.hasNext()) {
             if (false == secondTokens.hasNext()) {
-                fail(String.format(Locale.ROOT, """
+                fail(Strings.format("""
                     %s has fewer tokens than %s. %s has [%s] but %s is out of tokens. \
                     %s's last token was [%s] and %s's last token was' [%s]
                     """, second, first, first, firstTokens.next(), second, first, previousFirst, second, previousSecond));
@@ -327,7 +327,7 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
             String secondText = (String) secondToken.get("token");
             // Check the text and produce an error message with the utf8 sequence if they don't match.
             if (false == secondText.equals(firstText)) {
-                fail(String.format(Locale.ROOT, """
+                fail(Strings.format("""
                     text differs: %s was [%s] but %s was [%s]. In utf8 those are
                     %s and
                     %s
@@ -339,7 +339,7 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
             previousSecond = secondToken;
         }
         if (secondTokens.hasNext()) {
-            fail(String.format(Locale.ROOT, """
+            fail(Strings.format("""
                 %s has more tokens than %s. %s has [%s] but %s is out of tokens. \
                 %s's last token was [%s] and %s's last token was [%s]
                 """, second, first, second, secondTokens.next(), first, first, previousFirst, second, previousSecond));

@@ -8,6 +8,7 @@
 
 package org.elasticsearch.core.internal.provider;
 
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.core.internal.provider.EmbeddedImplClassLoader.CompoundEnumeration;
 import org.elasticsearch.test.ESTestCase;
@@ -452,7 +453,7 @@ public class EmbeddedImplClassLoaderTests extends ESTestCase {
         // getResources
        var urls1 = Collections.list(urlcLoader.getResources(resourcePath)).stream().map(URL::toString).toList();
        var urls2 = Collections.list(embedLoader.getResources(resourcePath)).stream().map(URL::toString).toList();
-        assertThat(String.format(Locale.ROOT, "urls1=%s, urls2=%s", urls1, urls2), urls2, hasSize(1));
+        assertThat(Strings.format("urls1=%s, urls2=%s", urls1, urls2), urls2, hasSize(1));
         assertThat(urls1.get(0), endsWith("!/" + expectedURLSuffix));
         assertThat(urls2.get(0), endsWith("impl.jar!/IMPL-JARS/res/res-impl.jar/" + expectedURLSuffix));
 

@@ -32,6 +32,7 @@ import org.elasticsearch.common.CheckedBiConsumer;
 import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.Maps;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.BooleanFieldMapper;
 import org.elasticsearch.index.mapper.DateFieldMapper;
@@ -859,7 +860,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         fullDocCount.clear();
         fullDocCount.putAll(skeletonDocCount);
         for (int minute = 3; minute < 15; minute++) {
-            fullDocCount.put(formatted("2017-02-01T09:%02d:00.000Z", minute), 0);
+            fullDocCount.put(Strings.format("2017-02-01T09:%02d:00.000Z", minute), 0);
         }
         testSearchCase(
             DEFAULT_QUERY,

@@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.IndexMode;
@@ -309,7 +310,7 @@ public class DataStreamIndexSettingsProviderTests extends ESTestCase {
         assertThat(
             e.getMessage(),
             equalTo(
-                formatted(
+                Strings.format(
                     "backing index [%s] in tsdb mode doesn't have the [index.time_series.end_time] index setting",
                     DataStream.getDefaultBackingIndexName(dataStreamName, 1, twoHoursAgo.toEpochMilli())
                 )

@@ -10,6 +10,7 @@ package org.elasticsearch.datastreams.mapper;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.index.MapperTestUtils;
 import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
@@ -36,7 +37,7 @@ public class MetadataCreateDataStreamServiceTests extends ESTestCase {
     public void testValidateTimestampFieldMappingNoFieldMapping() {
         Exception e = expectThrows(IllegalStateException.class, () -> validateTimestampFieldMapping(createMappingLookup("{}")));
         assertThat(e.getMessage(), equalTo("[" + DataStreamTimestampFieldMapper.NAME + "] meta field has been disabled"));
-        String mapping1 = formatted("""
+        String mapping1 = Strings.format("""
             {
               "%s": {
                 "enabled": false

@@ -13,6 +13,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.ingest.IngestStats;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.MockScriptEngine;
@@ -61,7 +62,7 @@ public class IngestRestartIT extends ESIntegTestCase {
         internalCluster().ensureAtLeastNumDataNodes(1);
         internalCluster().startMasterOnlyNode();
         final String pipelineId = "foo";
-        client().admin().cluster().preparePutPipeline(pipelineId, new BytesArray(formatted("""
+        client().admin().cluster().preparePutPipeline(pipelineId, new BytesArray(Strings.format("""
             {
               "processors": [
                 {
@@ -109,7 +110,7 @@
         String pipelineIdWithScript = pipelineIdWithoutScript + "_script";
         internalCluster().startNode();
 
-        BytesReference pipelineWithScript = new BytesArray(formatted("""
+        BytesReference pipelineWithScript = new BytesArray(Strings.format("""
             {
              "processors": [ { "script": { "lang": "%s", "source": "my_script" } } ]
            }""", MockScriptEngine.NAME));
@@ -179,7 +180,7 @@
     public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exception {
         internalCluster().startNode();
 
-        client().admin().cluster().preparePutStoredScript().setId("1").setContent(new BytesArray(formatted("""
+        client().admin().cluster().preparePutStoredScript().setId("1").setContent(new BytesArray(Strings.format("""
             {"script": {"lang": "%s", "source": "my_script"} }
             """, MockScriptEngine.NAME)), XContentType.JSON).get();
         BytesReference pipeline = new BytesArray("""

@@ -8,6 +8,7 @@
 
 package org.elasticsearch.ingest.common;
 
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.RandomDocumentPicks;
 import org.elasticsearch.ingest.TestTemplateService;
@@ -331,7 +332,7 @@ public class DateProcessorTests extends ESTestCase {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
         processor.execute(ingestDocument);
         // output format is time only with nanosecond precision
-        String expectedDate = "00:00:00." + formatted("%09d", nanosAfterEpoch);
+        String expectedDate = "00:00:00." + Strings.format("%09d", nanosAfterEpoch);
         assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo(expectedDate));
     }
 }

@@ -39,6 +39,7 @@ import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.core.CheckedRunnable;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.query.TermQueryBuilder;
@@ -410,7 +411,7 @@ public class DatabaseNodeServiceTests extends ESTestCase {
         byte[] header = new byte[512];
         byte[] nameBytes = name.getBytes(StandardCharsets.UTF_8);
         byte[] contentBytes = content.getBytes(StandardCharsets.UTF_8);
-        byte[] sizeBytes = formatted("%1$012o", contentBytes.length).getBytes(StandardCharsets.UTF_8);
+        byte[] sizeBytes = Strings.format("%1$012o", contentBytes.length).getBytes(StandardCharsets.UTF_8);
         System.arraycopy(nameBytes, 0, header, 0, nameBytes.length);
         System.arraycopy(sizeBytes, 0, header, 124, 12);
         gzipOutputStream.write(header);

@@ -16,6 +16,7 @@ import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.xcontent.json.JsonXContent;
 
@@ -69,7 +70,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
 
     public void testBulkToKibanaIndex() throws IOException {
         Request request = request("POST", "/_bulk");
-        request.setJsonEntity(formatted("""
+        request.setJsonEntity(Strings.format("""
             { "index" : { "_index" : "%s", "_id" : "1" } }
             { "foo" : "bar" }
             """, indexName));
@@ -79,7 +80,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
 
     public void testRefresh() throws IOException {
         Request request = request("POST", "/_bulk");
-        request.setJsonEntity(formatted("""
+        request.setJsonEntity(Strings.format("""
             { "index" : { "_index" : "%s", "_id" : "1" } }
             { "foo" : "bar" }
             """, indexName));
@@ -100,7 +101,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
 
     public void testGetFromKibanaIndex() throws IOException {
         Request request = request("POST", "/_bulk");
-        request.setJsonEntity(formatted("""
+        request.setJsonEntity(Strings.format("""
             { "index" : { "_index" : "%s", "_id" : "1" } }
             { "foo" : "bar" }
             """, indexName));
@@ -119,7 +120,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
 
     public void testMultiGetFromKibanaIndex() throws IOException {
         Request request = request("POST", "/_bulk");
-        request.setJsonEntity(formatted("""
+        request.setJsonEntity(Strings.format("""
             { "index" : { "_index" : "%s", "_id" : "1" } }
             { "foo" : "bar" }
             { "index" : { "_index" : "%s", "_id" : "2" } }
@@ -131,7 +132,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
         assertThat(response.getStatusLine().getStatusCode(), is(200));
 
         Request getRequest = request("GET", "/_mget");
-        getRequest.setJsonEntity(formatted("""
+        getRequest.setJsonEntity(Strings.format("""
             {
               "docs": [
                 {
@@ -155,7 +156,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
 
     public void testSearchFromKibanaIndex() throws IOException {
         Request request = request("POST", "/_bulk");
-        request.setJsonEntity(formatted("""
+        request.setJsonEntity(Strings.format("""
             { "index" : { "_index" : "%s", "_id" : "1" } }
             { "foo" : "bar" }
             { "index" : { "_index" : "%s", "_id" : "2" } }
@@ -181,7 +182,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
 
     public void testDeleteFromKibanaIndex() throws IOException {
         Request request = request("POST", "/_bulk");
-        request.setJsonEntity(formatted("""
+        request.setJsonEntity(Strings.format("""
             { "index" : { "_index" : "%s", "_id" : "1" } }
             { "foo" : "bar" }
             { "index" : { "_index" : "%s", "_id" : "2" } }
@@ -199,7 +200,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
 
     public void testDeleteByQueryFromKibanaIndex() throws IOException {
         Request request = request("POST", "/_bulk");
-        request.setJsonEntity(formatted("""
+        request.setJsonEntity(Strings.format("""
             { "index" : { "_index" : "%s", "_id" : "1" } }
             { "foo" : "bar" }
             { "index" : { "_index" : "%s", "_id" : "2" } }
@@ -289,7 +290,7 @@ public class KibanaSystemIndexIT extends ESRestTestCase {
 
     public void testScrollingDocs() throws IOException {
         Request request = request("POST", "/_bulk");
-        request.setJsonEntity(formatted("""
+        request.setJsonEntity(Strings.format("""
             { "index" : { "_index" : "%s", "_id" : "1" } }
             { "foo" : "bar" }
             { "index" : { "_index" : "%s", "_id" : "2" } }

@@ -9,6 +9,7 @@ package org.elasticsearch.script.mustache;
 
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.script.ScriptEngine;
 import org.elasticsearch.script.ScriptException;
 import org.elasticsearch.script.TemplateScript;
@@ -23,7 +24,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -150,7 +150,7 @@ public class MustacheTests extends ESTestCase {
         data.put("list", randomList);
         Map<String, Object> vars = new HashMap<>();
         vars.put("data", data);
-        String expectedString = String.format(Locale.ROOT, "%s %s", randomArrayValues.length, randomList.size());
+        String expectedString = Strings.format("%s %s", randomArrayValues.length, randomList.size());
         assertThat(factory.newInstance(vars).execute(), equalTo(expectedString));
     }
 

@@ -8,10 +8,11 @@
 
 package org.elasticsearch.painless;
 
+import org.elasticsearch.core.Strings;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 
 public class GetByPathAugmentationTests extends ScriptTestCase {
@@ -38,20 +39,20 @@ public class GetByPathAugmentationTests extends ScriptTestCase {
     }
 
     private String toScript(String collection, String key) {
-        return String.format(Locale.ROOT, "return %s.getByPath('%s')", collection, key);
+        return Strings.format("return %s.getByPath('%s')", collection, key);
     }
 
     private String toScript(String collection, String key, String defaultValue) {
-        return String.format(Locale.ROOT, "return %s.getByPath('%s', %s)", collection, key, defaultValue);
+        return Strings.format("return %s.getByPath('%s', %s)", collection, key, defaultValue);
     }
 
     private String numberFormat(String unparsable, String path, int i) {
         String format = "Could not parse [%s] as a int index into list at path [%s] and index [%d]";
-        return String.format(Locale.ROOT, format, unparsable, path, i);
+        return Strings.format(format, unparsable, path, i);
     }
 
     private String missingValue(String path) {
-        return formatted("Could not find value at path [%s]", path);
+        return Strings.format("Could not find value at path [%s]", path);
     }
 
     private void assertPathValue(String collection, String key, Object value) {

@@ -8,6 +8,8 @@
 
 package org.elasticsearch.painless;
 
+import org.elasticsearch.core.Strings;
+
 import java.util.HashMap;
 import java.util.Map;
 
@@ -72,14 +74,14 @@ public class StringTests extends ScriptTestCase {
         StringBuilder script = new StringBuilder("String s = \"cat\"; return s");
         StringBuilder result = new StringBuilder("cat");
         for (int i = 1; i < count; i++) {
-            final String s = formatted("%03d", i);
+            final String s = Strings.format("%03d", i);
             script.append(" + '").append(s).append("'.toString()");
             result.append(s);
         }
         final String s = script.toString();
         assertTrue(
             "every string part should be separately pushed to stack.",
-            Debugger.toString(s).contains(formatted("LDC \"%03d\"", count / 2))
+            Debugger.toString(s).contains(Strings.format("LDC \"%03d\"", count / 2))
         );
         assertEquals(result.toString(), exec(s));
     }

@@ -111,7 +111,7 @@ public class RankFeatureQueryBuilderTests extends AbstractQueryTestCase<RankFeat
     }
 
     public void testIllegalField() {
-        String query = formatted("""
+        String query = Strings.format("""
             {
               "rank_feature" : {
                 "field": "%s"

@@ -17,6 +17,7 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
 import org.elasticsearch.common.lucene.search.function.CombineFunction;
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.IdsQueryBuilder;
@@ -843,11 +844,11 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
         // index simple data
         int childId = 0;
         for (int i = 0; i < 10; i++) {
-            String parentId = formatted("p%03d", i);
+            String parentId = Strings.format("p%03d", i);
             createIndexRequest("test", "parent", parentId, null, "p_field", parentId).get();
             int j = childId;
             for (; j < childId + 50; j++) {
-                String childUid = formatted("c%03d", j);
+                String childUid = Strings.format("c%03d", j);
                 createIndexRequest("test", "child", childUid, parentId, "c_field", childUid).get();
             }
             childId = j;

@@ -252,7 +252,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
 
     public void testFromJsonNoDocumentType() throws IOException {
         SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
-        QueryBuilder queryBuilder = parseQuery(formatted("""
+        QueryBuilder queryBuilder = parseQuery(Strings.format("""
             {"percolate" : { "document": {}, "field":"%s"}}
             """, queryField));
         queryBuilder.toQuery(searchExecutionContext);
@@ -265,14 +265,14 @@
         documentSource = Collections.singletonList(randomSource(new HashSet<>()));
 
         SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
-        QueryBuilder queryBuilder = parseQuery(formatted("""
+        QueryBuilder queryBuilder = parseQuery(Strings.format("""
             {"percolate" : { "index": "%s", "id": "%s", "field":"%s"}}
             """, indexedDocumentIndex, indexedDocumentId, queryField));
         rewriteAndFetch(queryBuilder, searchExecutionContext).toQuery(searchExecutionContext);
     }
 
     public void testBothDocumentAndDocumentsSpecified() {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(formatted("""
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(Strings.format("""
             {"percolate" : { "document": {}, "documents": [{}, {}], "field":"%s"}}
             """, queryField)));
         assertThat(e.getMessage(), containsString("The following fields are not allowed together: [document, documents]"));
@@ -382,7 +382,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
 
     public void testFromJsonWithDocumentType() throws IOException {
         SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
-        String queryAsString = formatted("""
+        String queryAsString = Strings.format("""
             {"percolate" : { "document": {}, "document_type":"%s", "field":"%s"}}
             """, docType, queryField);
         XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, queryAsString, RestApiVersion.V_7);
@@ -398,7 +398,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
         documentSource = Collections.singletonList(randomSource(new HashSet<>()));
         SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
 
-        String queryAsString = formatted("""
+        String queryAsString = Strings.format("""
             {"percolate" : { "index": "%s", "type": "_doc", "id": "%s", "field":"%s"}}
             """, indexedDocumentIndex, indexedDocumentId, queryField);
         XContentParser parser = createParserWithCompatibilityFor(JsonXContent.jsonXContent, queryAsString, RestApiVersion.V_7);

@@ -288,11 +288,11 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
         assertEquals(expectedNdcg, detail.getNDCG(), 0.0);
         assertEquals(unratedDocs, detail.getUnratedDocs());
         if (idcg != 0) {
-            assertEquals(formatted("""
+            assertEquals(Strings.format("""
                 {"dcg":{"dcg":%s,"ideal_dcg":%s,"normalized_dcg":%s,"unrated_docs":%s}}\
                 """, dcg, idcg, expectedNdcg, unratedDocs), Strings.toString(detail));
         } else {
-            assertEquals(formatted("""
+            assertEquals(Strings.format("""
                 {"dcg":{"dcg":%s,"unrated_docs":%s}}\
                 """, dcg, unratedDocs), Strings.toString(detail));
         }

@@ -17,6 +17,7 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.test.ESTestCase;
@@ -228,7 +229,7 @@ public class RemoteRequestBuildersTests extends ESTestCase {
         searchRequest.source().fetchSource(new String[] { "in1", "in2" }, new String[] { "out" });
         entity = initialSearch(searchRequest, new BytesArray(query), remoteVersion).getEntity();
         assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue());
-        assertEquals(XContentHelper.stripWhitespace(formatted("""
+        assertEquals(XContentHelper.stripWhitespace(Strings.format("""
             {
               "query": %s,
               "_source": {

@@ -205,7 +205,7 @@ public class GoogleCloudStorageBlobContainerRetriesTests extends AbstractBlobCon
             assertThat(content.isPresent(), is(true));
             assertThat(content.get().v1(), equalTo(blobContainer.path().buildAsString() + "write_blob_max_retries"));
             if (Objects.deepEquals(bytes, BytesReference.toBytes(content.get().v2()))) {
-                byte[] response = formatted("""
+                byte[] response = Strings.format("""
                     {"bucket":"bucket","name":"%s"}
                     """, content.get().v1()).getBytes(UTF_8);
                 exchange.getResponseHeaders().add("Content-Type", "application/json");
@@ -351,7 +351,7 @@ public class GoogleCloudStorageBlobContainerRetriesTests extends AbstractBlobCon
             if (range.equals("bytes */*")) {
                 final int receivedSoFar = bytesReceived.get();
                 if (receivedSoFar > 0) {
-                    exchange.getResponseHeaders().add("Range", formatted("bytes=0-%d", receivedSoFar));
+                    exchange.getResponseHeaders().add("Range", Strings.format("bytes=0-%d", receivedSoFar));
                 }
                 exchange.getResponseHeaders().add("Content-Length", "0");
                 exchange.sendResponseHeaders(308 /* Resume Incomplete */, -1);
@@ -373,7 +373,7 @@ public class GoogleCloudStorageBlobContainerRetriesTests extends AbstractBlobCon
                 exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1);
                 return;
             } else {
-                exchange.getResponseHeaders().add("Range", String.format(Locale.ROOT, "bytes=%d/%d", rangeStart, rangeEnd));
+                exchange.getResponseHeaders().add("Range", Strings.format("bytes=%d/%d", rangeStart, rangeEnd));
                 exchange.getResponseHeaders().add("Content-Length", "0");
                 exchange.sendResponseHeaders(308 /* Resume Incomplete */, -1);
                 return;

@@ -13,6 +13,7 @@ import com.google.auth.oauth2.ServiceAccountCredentials;
 import org.elasticsearch.common.settings.MockSecureSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.test.ESTestCase;
@@ -260,7 +261,7 @@ public class GoogleCloudStorageClientSettingsTests extends ESTestCase {
         credentialBuilder.setPrivateKeyId("private_key_id_" + clientName);
         credentialBuilder.setScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL));
         final String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded());
-        final String serviceAccount = formatted("""
+        final String serviceAccount = Strings.format("""
             {
               "type": "service_account",
               "project_id": "project_id_%s",

@@ -13,6 +13,7 @@ import com.sun.net.httpserver.HttpServer;
 
 import org.apache.logging.log4j.LogManager;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.mocksocket.MockHttpServer;
@@ -53,7 +54,7 @@ public class CustomWebIdentityTokenCredentialsProviderTests extends ESTestCase {
             assertEquals(ROLE_NAME, params.get("RoleSessionName"));
 
             exchange.getResponseHeaders().add("Content-Type", "text/xml; charset=UTF-8");
-            byte[] response = formatted(
+            byte[] response = Strings.format(
                 """
                     <AssumeRoleWithWebIdentityResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
                       <AssumeRoleWithWebIdentityResult>

@@ -13,6 +13,7 @@ import org.elasticsearch.common.blobstore.BlobContainer;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -57,7 +58,7 @@ public abstract class AbstractURLBlobStoreTests extends ESTestCase {
             ignored.read();
             fail("Should have thrown NoSuchFileException exception");
         } catch (NoSuchFileException e) {
            assertEquals(Strings.format("blob object [%s] not found", incorrectBlobName), e.getMessage());
         }
     }
 }

@@ -10,6 +10,7 @@ package org.elasticsearch.common.blobstore.url.http;
 
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.test.ESTestCase;
 
@@ -17,7 +18,6 @@ import java.io.IOException;
 import java.net.URI;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -44,7 +44,7 @@ public class RetryingHttpInputStreamTests extends ESTestCase {
         when(secondHttpResponseInputStream.read(any(), anyInt(), anyInt())).thenReturn(blobSize - firstChunkSize).thenReturn(-1);
         final Map<String, String> secondResponseHeaders = Map.of(
             "Content-Range",
-            String.format(Locale.ROOT, "bytes %d-%d/%d", firstChunkSize, blobSize - 1, blobSize)
+            Strings.format("bytes %d-%d/%d", firstChunkSize, blobSize - 1, blobSize)
         );
 
         final List<MockHttpResponse> responses = List.of(

@@ -13,6 +13,7 @@ import io.netty.util.ReferenceCounted;
 
 import org.elasticsearch.ESNetty4IntegTestCase;
 import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.http.HttpServerTransport;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -53,7 +54,7 @@ public class Netty4PipeliningIT extends ESNetty4IntegTestCase {
     private void assertOpaqueIdsInOrder(Collection<String> opaqueIds) {
         // check if opaque ids are monotonically increasing
         int i = 0;
-        String msg = formatted("Expected list of opaque ids to be monotonically increasing, got [%s]", opaqueIds);
+        String msg = Strings.format("Expected list of opaque ids to be monotonically increasing, got [%s]", opaqueIds);
         for (String opaqueId : opaqueIds) {
             assertThat(msg, opaqueId, is(String.valueOf(i++)));
         }

@@ -14,13 +14,12 @@ import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.BoundTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
 import org.elasticsearch.test.junit.annotations.Network;
 import org.elasticsearch.transport.TransportInfo;
 
-import java.util.Locale;
-
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.hasKey;
@@ -39,7 +38,7 @@ public class Netty4TransportMultiPortIntegrationIT extends ESNetty4IntegTestCase
     protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
         if (randomPort == -1) {
             randomPort = randomIntBetween(49152, 65525);
-            randomPortRange = String.format(Locale.ROOT, "%s-%s", randomPort, randomPort + 10);
+            randomPortRange = Strings.format("%s-%s", randomPort, randomPort + 10);
         }
         Settings.Builder builder = Settings.builder()
             .put(super.nodeSettings(nodeOrdinal, otherSettings))

@ -22,6 +22,7 @@ import org.elasticsearch.client.Request;
|
||||||
import org.elasticsearch.client.Response;
|
import org.elasticsearch.client.Response;
|
||||||
import org.elasticsearch.client.RestClient;
|
import org.elasticsearch.client.RestClient;
|
||||||
import org.elasticsearch.core.PathUtils;
|
import org.elasticsearch.core.PathUtils;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||||
import org.junit.Assert;
|
import org.junit.Assert;
|
||||||
|
|
||||||
|
@ -100,7 +101,7 @@ public class HaHdfsFailoverTestSuiteIT extends ESRestTestCase {
|
||||||
// Create repository
|
// Create repository
|
||||||
{
|
{
|
||||||
Request request = new Request("PUT", "/_snapshot/hdfs_ha_repo_read");
|
Request request = new Request("PUT", "/_snapshot/hdfs_ha_repo_read");
|
||||||
request.setJsonEntity(formatted("""
|
request.setJsonEntity(Strings.format("""
|
||||||
{
|
{
|
||||||
"type": "hdfs",
|
"type": "hdfs",
|
||||||
"settings": {
|
"settings": {
|
||||||
|
|
|
@ -569,7 +569,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
|
||||||
public void testRollover() throws IOException {
|
public void testRollover() throws IOException {
|
||||||
if (isRunningAgainstOldCluster()) {
|
if (isRunningAgainstOldCluster()) {
|
||||||
Request createIndex = new Request("PUT", "/" + index + "-000001");
|
Request createIndex = new Request("PUT", "/" + index + "-000001");
|
||||||
createIndex.setJsonEntity(formatted("""
|
createIndex.setJsonEntity(Strings.format("""
|
||||||
{
|
{
|
||||||
"aliases": {
|
"aliases": {
|
||||||
"%s_write": {}
|
"%s_write": {}
|
||||||
|
@ -995,7 +995,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
|
||||||
|
|
||||||
// Stick a routing attribute into the cluster settings so we can see it after the restore
|
// Stick a routing attribute into the cluster settings so we can see it after the restore
|
||||||
Request addRoutingSettings = new Request("PUT", "/_cluster/settings");
|
Request addRoutingSettings = new Request("PUT", "/_cluster/settings");
|
||||||
addRoutingSettings.setJsonEntity(formatted("""
|
addRoutingSettings.setJsonEntity(Strings.format("""
|
||||||
{"persistent": {"cluster.routing.allocation.exclude.test_attr": "%s"}}
|
{"persistent": {"cluster.routing.allocation.exclude.test_attr": "%s"}}
|
||||||
""", getOldClusterVersion()));
|
""", getOldClusterVersion()));
|
||||||
client().performRequest(addRoutingSettings);
|
client().performRequest(addRoutingSettings);
|
||||||
|
@ -1296,7 +1296,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
|
||||||
int extras = between(1, 100);
|
int extras = between(1, 100);
|
||||||
StringBuilder bulk = new StringBuilder();
|
StringBuilder bulk = new StringBuilder();
|
||||||
for (int i = 0; i < extras; i++) {
|
for (int i = 0; i < extras; i++) {
|
||||||
bulk.append(formatted("""
|
bulk.append(Strings.format("""
|
||||||
{"index":{"_id":"%s"}}
|
{"index":{"_id":"%s"}}
|
||||||
{"test":"test"}
|
{"test":"test"}
|
||||||
""", count + i));
|
""", count + i));
|
||||||
|
|
|
@ -209,7 +209,7 @@ public class QueryBuilderBWCIT extends AbstractFullClusterRestartTestCase {
|
||||||
for (int i = 0; i < CANDIDATES.size(); i++) {
|
for (int i = 0; i < CANDIDATES.size(); i++) {
|
||||||
QueryBuilder expectedQueryBuilder = (QueryBuilder) CANDIDATES.get(i)[1];
|
QueryBuilder expectedQueryBuilder = (QueryBuilder) CANDIDATES.get(i)[1];
|
||||||
Request request = new Request("GET", "/" + index + "/_search");
|
Request request = new Request("GET", "/" + index + "/_search");
|
||||||
request.setJsonEntity(formatted("""
|
request.setJsonEntity(Strings.format("""
|
||||||
{"query": {"ids": {"values": ["%s"]}}, "docvalue_fields": [{"field":"query.query_builder_field"}]}
|
{"query": {"ids": {"values": ["%s"]}}, "docvalue_fields": [{"field":"query.query_builder_field"}]}
|
||||||
""", i));
|
""", i));
|
||||||
Response rsp = client().performRequest(request);
|
Response rsp = client().performRequest(request);
|
||||||
|
|
|
@ -7,12 +7,11 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.common.logging;
|
package org.elasticsearch.common.logging;
|
||||||
|
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.hamcrest.Matchers;
|
import org.hamcrest.Matchers;
|
||||||
import org.junit.BeforeClass;
|
import org.junit.BeforeClass;
|
||||||
|
|
||||||
import java.util.Locale;
|
|
||||||
|
|
||||||
public class ESJsonLayoutTests extends ESTestCase {
|
public class ESJsonLayoutTests extends ESTestCase {
|
||||||
@BeforeClass
|
@BeforeClass
|
||||||
public static void initNodeName() {
|
public static void initNodeName() {
|
||||||
|
@ -27,7 +26,7 @@ public class ESJsonLayoutTests extends ESTestCase {
|
||||||
ESJsonLayout server = ESJsonLayout.newBuilder().setType("server").build();
|
ESJsonLayout server = ESJsonLayout.newBuilder().setType("server").build();
|
||||||
String conversionPattern = server.getPatternLayout().getConversionPattern();
|
String conversionPattern = server.getPatternLayout().getConversionPattern();
|
||||||
|
|
||||||
assertThat(conversionPattern, Matchers.equalTo(String.format(Locale.ROOT, """
|
assertThat(conversionPattern, Matchers.equalTo(Strings.format("""
|
||||||
{\
|
{\
|
||||||
"type": "server", \
|
"type": "server", \
|
||||||
"timestamp": "%%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}", \
|
"timestamp": "%%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}", \
|
||||||
|
@ -45,7 +44,7 @@ public class ESJsonLayoutTests extends ESTestCase {
|
||||||
String conversionPattern = server.getPatternLayout().getConversionPattern();
|
String conversionPattern = server.getPatternLayout().getConversionPattern();
|
||||||
|
|
||||||
// message field is removed as it is expected to be provided by a field from a message
|
// message field is removed as it is expected to be provided by a field from a message
|
||||||
assertThat(conversionPattern, Matchers.equalTo(String.format(Locale.ROOT, """
|
assertThat(conversionPattern, Matchers.equalTo(Strings.format("""
|
||||||
{\
|
{\
|
||||||
"type": "server", \
|
"type": "server", \
|
||||||
"timestamp": "%%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}", \
|
"timestamp": "%%d{yyyy-MM-dd'T'HH:mm:ss,SSSZZ}", \
|
||||||
|
|
|
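A detail worth noting in the `ESJsonLayoutTests` hunks above: the expected conversion pattern is itself pushed through a formatter, so the literal log4j date converter must be written as `%%d{...}`. In `java.util.Formatter` syntax `%%` emits a single `%`, which is why the pattern needs no further change when the call switches from `String.format(Locale.ROOT, ...)` to `Strings.format(...)`. A tiny sketch (the pattern fragment here is illustrative, not taken from the diff):

    String fragment = Strings.format("\"timestamp\": \"%%d{HH:mm:ss}\"");
    // fragment is now: "timestamp": "%d{HH:mm:ss}"   (%% collapsed to a literal %)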
@ -13,6 +13,7 @@ import org.elasticsearch.client.Response;
|
||||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||||
import org.hamcrest.Matchers;
|
import org.hamcrest.Matchers;
|
||||||
|
|
||||||
|
@ -30,10 +31,11 @@ public class RareTermsIT extends ESRestTestCase {
|
||||||
final Request request = new Request("POST", "/_bulk");
|
final Request request = new Request("POST", "/_bulk");
|
||||||
final StringBuilder builder = new StringBuilder();
|
final StringBuilder builder = new StringBuilder();
|
||||||
for (int i = 0; i < numDocs; ++i) {
|
for (int i = 0; i < numDocs; ++i) {
|
||||||
builder.append(formatted("""
|
Object[] args = new Object[] { index, id++, i };
|
||||||
|
builder.append(Strings.format("""
|
||||||
{ "index" : { "_index" : "%s", "_id": "%s" } }
|
{ "index" : { "_index" : "%s", "_id": "%s" } }
|
||||||
{"str_value" : "s%s"}
|
{"str_value" : "s%s"}
|
||||||
""", index, id++, i));
|
""", args));
|
||||||
}
|
}
|
||||||
request.setJsonEntity(builder.toString());
|
request.setJsonEntity(builder.toString());
|
||||||
assertOK(client().performRequest(request));
|
assertOK(client().performRequest(request));
|
||||||
|
@ -62,7 +64,7 @@ public class RareTermsIT extends ESRestTestCase {
|
||||||
|
|
||||||
private void assertNumRareTerms(int maxDocs, int rareTerms) throws IOException {
|
private void assertNumRareTerms(int maxDocs, int rareTerms) throws IOException {
|
||||||
final Request request = new Request("POST", index + "/_search");
|
final Request request = new Request("POST", index + "/_search");
|
||||||
request.setJsonEntity(formatted("""
|
request.setJsonEntity(Strings.format("""
|
||||||
{
|
{
|
||||||
"aggs": {
|
"aggs": {
|
||||||
"rareTerms": {
|
"rareTerms": {
|
||||||
|
|
|
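In the `RareTermsIT` hunk above the format arguments are first hoisted into an `Object[] args` rather than passed inline. The diff does not say why; presumably it keeps the statement within line-length limits once the format string is a multi-line text block. The hoisting is behaviour-preserving even with side effects such as `id++`, because the increment runs when the array is built, at the same point it ran inline. A sketch of the equivalence (`template` is a stand-in for the text block):

    String template = "{ \"index\" : { \"_index\" : \"%s\", \"_id\": \"%s\" } }\n{\"str_value\" : \"s%s\"}\n";
    // inline form:  builder.append(Strings.format(template, index, id++, i));
    // hoisted form, identical in effect; id++ still evaluates here:
    Object[] args = new Object[] { index, id++, i };
    builder.append(Strings.format(template, args));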
@ -18,6 +18,7 @@ import org.elasticsearch.client.RestClient;
|
||||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.core.CheckedRunnable;
|
import org.elasticsearch.core.CheckedRunnable;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.rest.RestStatus;
|
import org.elasticsearch.rest.RestStatus;
|
||||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||||
import org.elasticsearch.test.rest.ObjectPath;
|
import org.elasticsearch.test.rest.ObjectPath;
|
||||||
|
@ -84,7 +85,7 @@ public class SearchWithMinCompatibleSearchNodeIT extends ESRestTestCase {
|
||||||
);
|
);
|
||||||
assertThat(responseException.getMessage(), containsString("""
|
assertThat(responseException.getMessage(), containsString("""
|
||||||
{"error":{"root_cause":[],"type":"search_phase_execution_exception\""""));
|
{"error":{"root_cause":[],"type":"search_phase_execution_exception\""""));
|
||||||
assertThat(responseException.getMessage(), containsString(formatted("""
|
assertThat(responseException.getMessage(), containsString(Strings.format("""
|
||||||
caused_by":{"type":"version_mismatch_exception",\
|
caused_by":{"type":"version_mismatch_exception",\
|
||||||
"reason":"One of the shards is incompatible with the required minimum version [%s]\"""", newVersion)));
|
"reason":"One of the shards is incompatible with the required minimum version [%s]\"""", newVersion)));
|
||||||
});
|
});
|
||||||
|
|
|
@ -280,7 +280,7 @@ public class MultiVersionRepositoryAccessIT extends ESRestTestCase {
|
||||||
|
|
||||||
private void createIndex(String name, int shards) throws IOException {
|
private void createIndex(String name, int shards) throws IOException {
|
||||||
final Request putIndexRequest = new Request("PUT", "/" + name);
|
final Request putIndexRequest = new Request("PUT", "/" + name);
|
||||||
putIndexRequest.setJsonEntity(formatted("""
|
putIndexRequest.setJsonEntity(Strings.format("""
|
||||||
{
|
{
|
||||||
"settings" : {
|
"settings" : {
|
||||||
"index" : {
|
"index" : {
|
||||||
|
|
|
@ -135,7 +135,7 @@ public class IndexingIT extends AbstractRollingTestCase {
|
||||||
|
|
||||||
public void testAutoIdWithOpTypeCreate() throws IOException {
|
public void testAutoIdWithOpTypeCreate() throws IOException {
|
||||||
final String indexName = "auto_id_and_op_type_create_index";
|
final String indexName = "auto_id_and_op_type_create_index";
|
||||||
String b = formatted("""
|
String b = Strings.format("""
|
||||||
{"create": {"_index": "%s"}}
|
{"create": {"_index": "%s"}}
|
||||||
{"f1": "v"}
|
{"f1": "v"}
|
||||||
""", indexName);
|
""", indexName);
|
||||||
|
@ -325,7 +325,7 @@ public class IndexingIT extends AbstractRollingTestCase {
|
||||||
long delta = TimeUnit.SECONDS.toMillis(20);
|
long delta = TimeUnit.SECONDS.toMillis(20);
|
||||||
double value = (timeStart - TSDB_TIMES[0]) / TimeUnit.SECONDS.toMillis(20) * rate;
|
double value = (timeStart - TSDB_TIMES[0]) / TimeUnit.SECONDS.toMillis(20) * rate;
|
||||||
for (long t = timeStart; t < timeEnd; t += delta) {
|
for (long t = timeStart; t < timeEnd; t += delta) {
|
||||||
bulk.append(formatted("""
|
bulk.append(Strings.format("""
|
||||||
{"index": {"_index": "tsdb"}}
|
{"index": {"_index": "tsdb"}}
|
||||||
{"@timestamp": %s, "dim": "%s", "value": %s}
|
{"@timestamp": %s, "dim": "%s", "value": %s}
|
||||||
""", t, dim, value));
|
""", t, dim, value));
|
||||||
|
|
|
@ -13,9 +13,9 @@ import org.elasticsearch.client.Request;
|
||||||
import org.elasticsearch.client.Response;
|
import org.elasticsearch.client.Response;
|
||||||
import org.elasticsearch.client.ResponseException;
|
import org.elasticsearch.client.ResponseException;
|
||||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM;
|
import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM;
|
||||||
|
@ -56,7 +56,7 @@ public class UpgradeWithOldIndexSettingsIT extends AbstractRollingTestCase {
|
||||||
if (UPGRADE_FROM_VERSION.before(Version.V_8_0_0)) {
|
if (UPGRADE_FROM_VERSION.before(Version.V_8_0_0)) {
|
||||||
bulk.setOptions(expectWarnings(EXPECTED_WARNING));
|
bulk.setOptions(expectWarnings(EXPECTED_WARNING));
|
||||||
}
|
}
|
||||||
bulk.setJsonEntity(String.format(Locale.ROOT, """
|
bulk.setJsonEntity(Strings.format("""
|
||||||
{"index": {"_index": "%s"}}
|
{"index": {"_index": "%s"}}
|
||||||
{"f1": "v1", "f2": "v2"}
|
{"f1": "v1", "f2": "v2"}
|
||||||
""", INDEX_NAME));
|
""", INDEX_NAME));
|
||||||
|
@ -69,7 +69,7 @@ public class UpgradeWithOldIndexSettingsIT extends AbstractRollingTestCase {
|
||||||
if (UPGRADE_FROM_VERSION.before(Version.V_8_0_0)) {
|
if (UPGRADE_FROM_VERSION.before(Version.V_8_0_0)) {
|
||||||
bulk.setOptions(expectWarnings(EXPECTED_WARNING));
|
bulk.setOptions(expectWarnings(EXPECTED_WARNING));
|
||||||
}
|
}
|
||||||
bulk.setJsonEntity(String.format(Locale.ROOT, """
|
bulk.setJsonEntity(Strings.format("""
|
||||||
{"index": {"_index": "%s"}}
|
{"index": {"_index": "%s"}}
|
||||||
{"f1": "v3", "f2": "v4"}
|
{"f1": "v3", "f2": "v4"}
|
||||||
""", INDEX_NAME));
|
""", INDEX_NAME));
|
||||||
|
|
|
@ -366,7 +366,7 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase {
|
||||||
assertThat(d.getExplanation(), startsWith("a copy of this shard is already allocated to this node ["));
|
assertThat(d.getExplanation(), startsWith("a copy of this shard is already allocated to this node ["));
|
||||||
} else if (d.label().equals("filter") && nodeHoldingPrimary == false) {
|
} else if (d.label().equals("filter") && nodeHoldingPrimary == false) {
|
||||||
assertEquals(Decision.Type.NO, d.type());
|
assertEquals(Decision.Type.NO, d.type());
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
node does not match index setting [index.routing.allocation.include] \
|
node does not match index setting [index.routing.allocation.include] \
|
||||||
filters [_name:"%s"]\
|
filters [_name:"%s"]\
|
||||||
""", primaryNodeName), d.getExplanation());
|
""", primaryNodeName), d.getExplanation());
|
||||||
|
@ -914,7 +914,7 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase {
|
||||||
for (Decision d : result.getCanAllocateDecision().getDecisions()) {
|
for (Decision d : result.getCanAllocateDecision().getDecisions()) {
|
||||||
if (d.label().equals("filter")) {
|
if (d.label().equals("filter")) {
|
||||||
assertEquals(Decision.Type.NO, d.type());
|
assertEquals(Decision.Type.NO, d.type());
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
node does not match index setting [index.routing.allocation.include] filters [_name:"%s"]\
|
node does not match index setting [index.routing.allocation.include] filters [_name:"%s"]\
|
||||||
""", primaryNodeName), d.getExplanation());
|
""", primaryNodeName), d.getExplanation());
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -837,8 +837,8 @@ public class RolloverIT extends ESIntegTestCase {
|
||||||
.prepareRolloverIndex(aliasName)
|
.prepareRolloverIndex(aliasName)
|
||||||
.waitForActiveShards(ActiveShardCount.NONE)
|
.waitForActiveShards(ActiveShardCount.NONE)
|
||||||
.get();
|
.get();
|
||||||
assertThat(response.getOldIndex(), equalTo(aliasName + formatted("-%06d", j)));
|
assertThat(response.getOldIndex(), equalTo(aliasName + Strings.format("-%06d", j)));
|
||||||
assertThat(response.getNewIndex(), equalTo(aliasName + formatted("-%06d", j + 1)));
|
assertThat(response.getNewIndex(), equalTo(aliasName + Strings.format("-%06d", j + 1)));
|
||||||
assertThat(response.isDryRun(), equalTo(false));
|
assertThat(response.isDryRun(), equalTo(false));
|
||||||
assertThat(response.isRolledOver(), equalTo(true));
|
assertThat(response.isRolledOver(), equalTo(true));
|
||||||
}
|
}
|
||||||
|
@ -859,7 +859,7 @@ public class RolloverIT extends ESIntegTestCase {
|
||||||
for (int j = 1; j <= numOfIndices; j++) {
|
for (int j = 1; j <= numOfIndices; j++) {
|
||||||
AliasMetadata.Builder amBuilder = new AliasMetadata.Builder(aliasName);
|
AliasMetadata.Builder amBuilder = new AliasMetadata.Builder(aliasName);
|
||||||
amBuilder.writeIndex(j == numOfIndices);
|
amBuilder.writeIndex(j == numOfIndices);
|
||||||
expected.add(Map.entry(aliasName + formatted("-%06d", j), List.of(amBuilder.build())));
|
expected.add(Map.entry(aliasName + Strings.format("-%06d", j), List.of(amBuilder.build())));
|
||||||
}
|
}
|
||||||
assertThat(actual, containsInAnyOrder(expected.toArray(Object[]::new)));
|
assertThat(actual, containsInAnyOrder(expected.toArray(Object[]::new)));
|
||||||
}
|
}
|
||||||
|
|
|
@ -12,6 +12,7 @@ import org.apache.lucene.search.TotalHits;
|
||||||
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
|
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
|
||||||
import org.elasticsearch.client.internal.Client;
|
import org.elasticsearch.client.internal.Client;
|
||||||
import org.elasticsearch.client.internal.node.NodeClient;
|
import org.elasticsearch.client.internal.node.NodeClient;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.query.QueryBuilders;
|
import org.elasticsearch.index.query.QueryBuilders;
|
||||||
import org.elasticsearch.search.SearchShardTarget;
|
import org.elasticsearch.search.SearchShardTarget;
|
||||||
import org.elasticsearch.search.aggregations.AggregationBuilders;
|
import org.elasticsearch.search.aggregations.AggregationBuilders;
|
||||||
|
@ -184,7 +185,7 @@ public class SearchProgressActionListenerIT extends ESSingleNodeTestCase {
|
||||||
private static List<SearchShard> createRandomIndices(Client client) {
|
private static List<SearchShard> createRandomIndices(Client client) {
|
||||||
int numIndices = randomIntBetween(3, 20);
|
int numIndices = randomIntBetween(3, 20);
|
||||||
for (int i = 0; i < numIndices; i++) {
|
for (int i = 0; i < numIndices; i++) {
|
||||||
String indexName = formatted("index-%03d", i);
|
String indexName = Strings.format("index-%03d", i);
|
||||||
assertAcked(client.admin().indices().prepareCreate(indexName).get());
|
assertAcked(client.admin().indices().prepareCreate(indexName).get());
|
||||||
client.prepareIndex(indexName).setSource("number", i, "foo", "bar").get();
|
client.prepareIndex(indexName).setSource("number", i, "foo", "bar").get();
|
||||||
}
|
}
|
||||||
|
|
|
@ -14,6 +14,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||||
import org.elasticsearch.common.Priority;
|
import org.elasticsearch.common.Priority;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.rest.RestStatus;
|
import org.elasticsearch.rest.RestStatus;
|
||||||
import org.elasticsearch.test.ESIntegTestCase;
|
import org.elasticsearch.test.ESIntegTestCase;
|
||||||
import org.elasticsearch.xcontent.XContentType;
|
import org.elasticsearch.xcontent.XContentType;
|
||||||
|
@ -126,7 +127,7 @@ public class WaitActiveShardCountIT extends ESIntegTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private String source(String id, String nameValue) {
|
private String source(String id, String nameValue) {
|
||||||
return formatted("""
|
return Strings.format("""
|
||||||
{ "type1" : { "id" : "%s", "name" : "%s" } }
|
{ "type1" : { "id" : "%s", "name" : "%s" } }
|
||||||
""", id, nameValue);
|
""", id, nameValue);
|
||||||
}
|
}
|
||||||
|
|
|
@ -16,6 +16,7 @@ import org.elasticsearch.cluster.ClusterState;
|
||||||
import org.elasticsearch.cluster.coordination.NoMasterBlockService;
|
import org.elasticsearch.cluster.coordination.NoMasterBlockService;
|
||||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.core.TimeValue;
|
import org.elasticsearch.core.TimeValue;
|
||||||
import org.elasticsearch.test.ESIntegTestCase;
|
import org.elasticsearch.test.ESIntegTestCase;
|
||||||
import org.elasticsearch.test.disruption.BlockMasterServiceOnMaster;
|
import org.elasticsearch.test.disruption.BlockMasterServiceOnMaster;
|
||||||
|
@ -142,7 +143,7 @@ public class MasterDisruptionIT extends AbstractDisruptionTestCase {
|
||||||
assertEquals("different meta data version", state.metadata().version(), nodeState.metadata().version());
|
assertEquals("different meta data version", state.metadata().version(), nodeState.metadata().version());
|
||||||
assertEquals("different routing", state.routingTable().toString(), nodeState.routingTable().toString());
|
assertEquals("different routing", state.routingTable().toString(), nodeState.routingTable().toString());
|
||||||
} catch (AssertionError t) {
|
} catch (AssertionError t) {
|
||||||
fail(formatted("""
|
fail(Strings.format("""
|
||||||
failed comparing cluster state: %s
|
failed comparing cluster state: %s
|
||||||
--- cluster state of node [%s]: ---
|
--- cluster state of node [%s]: ---
|
||||||
%s
|
%s
|
||||||
|
@ -202,7 +203,7 @@ public class MasterDisruptionIT extends AbstractDisruptionTestCase {
|
||||||
success = false;
|
success = false;
|
||||||
}
|
}
|
||||||
if (success == false) {
|
if (success == false) {
|
||||||
fail(formatted("""
|
fail(Strings.format("""
|
||||||
node [%s] has no master or has blocks, despite being on the right side of the partition. State dump:
|
node [%s] has no master or has blocks, despite being on the right side of the partition. State dump:
|
||||||
%s""", node, nodeState));
|
%s""", node, nodeState));
|
||||||
}
|
}
|
||||||
|
|
|
@ -789,7 +789,7 @@ public class GetActionIT extends ESIntegTestCase {
|
||||||
void indexSingleDocumentWithStringFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
|
void indexSingleDocumentWithStringFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
|
||||||
|
|
||||||
String storedString = stored ? "true" : "false";
|
String storedString = stored ? "true" : "false";
|
||||||
String createIndexSource = formatted("""
|
String createIndexSource = Strings.format("""
|
||||||
{
|
{
|
||||||
"settings": {
|
"settings": {
|
||||||
"index.translog.flush_threshold_size": "1pb",
|
"index.translog.flush_threshold_size": "1pb",
|
||||||
|
|
|
@ -35,7 +35,6 @@ import org.hamcrest.Matchers;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.concurrent.ConcurrentHashMap;
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
import java.util.concurrent.ConcurrentMap;
|
import java.util.concurrent.ConcurrentMap;
|
||||||
|
@ -272,7 +271,7 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase {
|
||||||
try {
|
try {
|
||||||
assertBusy(waitPredicate, 1, TimeUnit.MINUTES);
|
assertBusy(waitPredicate, 1, TimeUnit.MINUTES);
|
||||||
} catch (AssertionError ae) {
|
} catch (AssertionError ae) {
|
||||||
fail(String.format(Locale.ROOT, """
|
fail(Strings.format("""
|
||||||
failed to observe expected shard states
|
failed to observe expected shard states
|
||||||
expected: [%d] shards with states: %s
|
expected: [%d] shards with states: %s
|
||||||
observed:
|
observed:
|
||||||
|
|
|
@ -10,6 +10,7 @@ package org.elasticsearch.indices.mapping;
|
||||||
|
|
||||||
import org.elasticsearch.action.ActionListener;
|
import org.elasticsearch.action.ActionListener;
|
||||||
import org.elasticsearch.action.index.IndexResponse;
|
import org.elasticsearch.action.index.IndexResponse;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.query.QueryBuilders;
|
import org.elasticsearch.index.query.QueryBuilders;
|
||||||
import org.elasticsearch.test.ESIntegTestCase;
|
import org.elasticsearch.test.ESIntegTestCase;
|
||||||
|
|
||||||
|
@ -28,7 +29,7 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase {
|
||||||
// see #3544
|
// see #3544
|
||||||
public void testConcurrentDynamicMapping() throws Exception {
|
public void testConcurrentDynamicMapping() throws Exception {
|
||||||
final String fieldName = "field";
|
final String fieldName = "field";
|
||||||
final String mapping = formatted("""
|
final String mapping = Strings.format("""
|
||||||
{
|
{
|
||||||
"dynamic_templates": [
|
"dynamic_templates": [
|
||||||
{
|
{
|
||||||
|
|
|
@ -18,6 +18,7 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
|
||||||
import org.elasticsearch.action.support.PlainActionFuture;
|
import org.elasticsearch.action.support.PlainActionFuture;
|
||||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||||
import org.elasticsearch.common.util.CollectionUtils;
|
import org.elasticsearch.common.util.CollectionUtils;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.shard.ShardId;
|
import org.elasticsearch.index.shard.ShardId;
|
||||||
import org.elasticsearch.repositories.fs.FsRepository;
|
import org.elasticsearch.repositories.fs.FsRepository;
|
||||||
|
@ -31,7 +32,6 @@ import java.nio.file.Path;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
|
|
||||||
|
@ -59,7 +59,7 @@ public class IndexSnapshotsServiceIT extends AbstractSnapshotIntegTestCase {
|
||||||
assertThat(repositoryException, is(notNullValue()));
|
assertThat(repositoryException, is(notNullValue()));
|
||||||
assertThat(
|
assertThat(
|
||||||
repositoryException.getMessage(),
|
repositoryException.getMessage(),
|
||||||
equalTo(formatted("[%s] Unable to find the latest snapshot for shard [[idx][0]]", repository))
|
equalTo(Strings.format("[%s] Unable to find the latest snapshot for shard [[idx][0]]", repository))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
@ -133,7 +133,7 @@ public class IndexSnapshotsServiceIT extends AbstractSnapshotIntegTestCase {
|
||||||
indexRandomDocs(indexName2, 10);
|
indexRandomDocs(indexName2, 10);
|
||||||
}
|
}
|
||||||
final List<String> snapshotIndices = randomSubsetOf(indices);
|
final List<String> snapshotIndices = randomSubsetOf(indices);
|
||||||
final SnapshotInfo snapshotInfo = createSnapshot(repoName, formatted("snap-%03d", i), snapshotIndices);
|
final SnapshotInfo snapshotInfo = createSnapshot(repoName, Strings.format("snap-%03d", i), snapshotIndices);
|
||||||
if (snapshotInfo.indices().contains(indexName)) {
|
if (snapshotInfo.indices().contains(indexName)) {
|
||||||
lastSnapshot = snapshotInfo;
|
lastSnapshot = snapshotInfo;
|
||||||
ClusterStateResponse clusterStateResponse = admin().cluster().prepareState().execute().actionGet();
|
ClusterStateResponse clusterStateResponse = admin().cluster().prepareState().execute().actionGet();
|
||||||
|
@ -206,10 +206,12 @@ public class IndexSnapshotsServiceIT extends AbstractSnapshotIntegTestCase {
|
||||||
createIndexWithContent(indexName);
|
createIndexWithContent(indexName);
|
||||||
|
|
||||||
int snapshotIdx = 0;
|
int snapshotIdx = 0;
|
||||||
createSnapshot(failingRepoName, formatted("snap-%03d", snapshotIdx++), Collections.singletonList(indexName));
|
Object[] args1 = new Object[] { snapshotIdx++ };
|
||||||
|
createSnapshot(failingRepoName, Strings.format("snap-%03d", args1), Collections.singletonList(indexName));
|
||||||
SnapshotInfo latestSnapshot = null;
|
SnapshotInfo latestSnapshot = null;
|
||||||
for (String workingRepoName : workingRepoNames) {
|
for (String workingRepoName : workingRepoNames) {
|
||||||
String snapshot = formatted("snap-%03d", snapshotIdx++);
|
Object[] args = new Object[] { snapshotIdx++ };
|
||||||
|
String snapshot = Strings.format("snap-%03d", args);
|
||||||
latestSnapshot = createSnapshot(workingRepoName, snapshot, Collections.singletonList(indexName));
|
latestSnapshot = createSnapshot(workingRepoName, snapshot, Collections.singletonList(indexName));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -234,7 +236,7 @@ public class IndexSnapshotsServiceIT extends AbstractSnapshotIntegTestCase {
|
||||||
assertThat(error.isPresent(), is(equalTo(true)));
|
assertThat(error.isPresent(), is(equalTo(true)));
|
||||||
assertThat(
|
assertThat(
|
||||||
error.get().getMessage(),
|
error.get().getMessage(),
|
||||||
equalTo(String.format(Locale.ROOT, "[%s] Unable to find the latest snapshot for shard [[%s][0]]", failingRepoName, indexName))
|
equalTo(Strings.format("[%s] Unable to find the latest snapshot for shard [[%s][0]]", failingRepoName, indexName))
|
||||||
);
|
);
|
||||||
|
|
||||||
for (String workingRepoName : workingRepoNames) {
|
for (String workingRepoName : workingRepoNames) {
|
||||||
|
@ -264,7 +266,8 @@ public class IndexSnapshotsServiceIT extends AbstractSnapshotIntegTestCase {
|
||||||
int snapshotIdx = 0;
|
int snapshotIdx = 0;
|
||||||
SnapshotInfo expectedLatestSnapshot = null;
|
SnapshotInfo expectedLatestSnapshot = null;
|
||||||
for (String repository : repositories) {
|
for (String repository : repositories) {
|
||||||
String snapshot = formatted("snap-%03d", snapshotIdx++);
|
Object[] args = new Object[] { snapshotIdx++ };
|
||||||
|
String snapshot = Strings.format("snap-%03d", args);
|
||||||
expectedLatestSnapshot = createSnapshot(repository, snapshot, Collections.singletonList(indexName));
|
expectedLatestSnapshot = createSnapshot(repository, snapshot, Collections.singletonList(indexName));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -9,6 +9,7 @@ package org.elasticsearch.script;
|
||||||
|
|
||||||
import org.elasticsearch.common.bytes.BytesArray;
|
import org.elasticsearch.common.bytes.BytesArray;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.plugins.Plugin;
|
import org.elasticsearch.plugins.Plugin;
|
||||||
import org.elasticsearch.test.ESIntegTestCase;
|
import org.elasticsearch.test.ESIntegTestCase;
|
||||||
import org.elasticsearch.xcontent.XContentType;
|
import org.elasticsearch.xcontent.XContentType;
|
||||||
|
@ -40,7 +41,7 @@ public class StoredScriptsIT extends ESIntegTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testBasics() {
|
public void testBasics() {
|
||||||
assertAcked(client().admin().cluster().preparePutStoredScript().setId("foobar").setContent(new BytesArray(formatted("""
|
assertAcked(client().admin().cluster().preparePutStoredScript().setId("foobar").setContent(new BytesArray(Strings.format("""
|
||||||
{"script": {"lang": "%s", "source": "1"} }
|
{"script": {"lang": "%s", "source": "1"} }
|
||||||
""", LANG)), XContentType.JSON));
|
""", LANG)), XContentType.JSON));
|
||||||
String script = client().admin().cluster().prepareGetStoredScript("foobar").get().getSource().getSource();
|
String script = client().admin().cluster().prepareGetStoredScript("foobar").get().getSource().getSource();
|
||||||
|
@ -53,9 +54,9 @@ public class StoredScriptsIT extends ESIntegTestCase {
|
||||||
|
|
||||||
IllegalArgumentException e = expectThrows(
|
IllegalArgumentException e = expectThrows(
|
||||||
IllegalArgumentException.class,
|
IllegalArgumentException.class,
|
||||||
() -> client().admin().cluster().preparePutStoredScript().setId("id#").setContent(new BytesArray(formatted("""
|
() -> { client().admin().cluster().preparePutStoredScript().setId("id#").setContent(new BytesArray(Strings.format("""
|
||||||
{"script": {"lang": "%s", "source": "1"} }
|
{"script": {"lang": "%s", "source": "1"} }
|
||||||
""", LANG)), XContentType.JSON).get()
|
""", LANG)), XContentType.JSON).get(); }
|
||||||
);
|
);
|
||||||
assertEquals("Validation Failed: 1: id cannot contain '#' for stored script;", e.getMessage());
|
assertEquals("Validation Failed: 1: id cannot contain '#' for stored script;", e.getMessage());
|
||||||
}
|
}
|
||||||
|
@ -63,9 +64,9 @@ public class StoredScriptsIT extends ESIntegTestCase {
|
||||||
public void testMaxScriptSize() {
|
public void testMaxScriptSize() {
|
||||||
IllegalArgumentException e = expectThrows(
|
IllegalArgumentException e = expectThrows(
|
||||||
IllegalArgumentException.class,
|
IllegalArgumentException.class,
|
||||||
() -> client().admin().cluster().preparePutStoredScript().setId("foobar").setContent(new BytesArray(formatted("""
|
() -> { client().admin().cluster().preparePutStoredScript().setId("foobar").setContent(new BytesArray(Strings.format("""
|
||||||
{"script": { "lang": "%s", "source":"0123456789abcdef"} }\
|
{"script": { "lang": "%s", "source":"0123456789abcdef"} }\
|
||||||
""", LANG)), XContentType.JSON).get()
|
""", LANG)), XContentType.JSON).get(); }
|
||||||
);
|
);
|
||||||
assertEquals("exceeded max allowed stored script size in bytes [64] with size [65] for script [foobar]", e.getMessage());
|
assertEquals("exceeded max allowed stored script size in bytes [64] with size [65] for script [foobar]", e.getMessage());
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchException;
|
||||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||||
import org.elasticsearch.action.search.SearchResponse;
|
import org.elasticsearch.action.search.SearchResponse;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||||
import org.elasticsearch.index.query.QueryBuilders;
|
import org.elasticsearch.index.query.QueryBuilders;
|
||||||
import org.elasticsearch.plugins.Plugin;
|
import org.elasticsearch.plugins.Plugin;
|
||||||
|
@ -520,7 +521,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
||||||
assertThat(terms.getBuckets().size(), equalTo(5));
|
assertThat(terms.getBuckets().size(), equalTo(5));
|
||||||
|
|
||||||
for (int i = 0; i < 5; i++) {
|
for (int i = 0; i < 5; i++) {
|
||||||
String key = formatted("%07.2f", (double) i);
|
String key = Strings.format("%07.2f", (double) i);
|
||||||
DoubleTerms.Bucket bucket = terms.getBucketByKey(key);
|
DoubleTerms.Bucket bucket = terms.getBucketByKey(key);
|
||||||
assertThat(bucket, notNullValue());
|
assertThat(bucket, notNullValue());
|
||||||
assertThat(bucket.getKeyAsString(), equalTo(key));
|
assertThat(bucket.getKeyAsString(), equalTo(key));
|
||||||
|
|
|
@ -11,6 +11,7 @@ import org.elasticsearch.ElasticsearchException;
|
||||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||||
import org.elasticsearch.action.search.SearchResponse;
|
import org.elasticsearch.action.search.SearchResponse;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||||
import org.elasticsearch.index.query.QueryBuilders;
|
import org.elasticsearch.index.query.QueryBuilders;
|
||||||
import org.elasticsearch.plugins.Plugin;
|
import org.elasticsearch.plugins.Plugin;
|
||||||
|
@ -510,7 +511,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
|
||||||
assertThat(terms.getBuckets().size(), equalTo(5));
|
assertThat(terms.getBuckets().size(), equalTo(5));
|
||||||
|
|
||||||
for (int i = 0; i < 5; i++) {
|
for (int i = 0; i < 5; i++) {
|
||||||
String key = formatted("%04d", i);
|
String key = Strings.format("%04d", i);
|
||||||
LongTerms.Bucket bucket = terms.getBucketByKey(key);
|
LongTerms.Bucket bucket = terms.getBucketByKey(key);
|
||||||
assertThat(bucket, notNullValue());
|
assertThat(bucket, notNullValue());
|
||||||
assertThat(bucket.getKeyAsString(), equalTo(key));
|
assertThat(bucket.getKeyAsString(), equalTo(key));
|
||||||
|
|
|
@ -150,7 +150,8 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
||||||
classes.toXContent(responseBuilder, ToXContent.EMPTY_PARAMS);
|
classes.toXContent(responseBuilder, ToXContent.EMPTY_PARAMS);
|
||||||
responseBuilder.endObject();
|
responseBuilder.endObject();
|
||||||
|
|
||||||
String result = formatted("""
|
Object[] args = new Object[] { type.equals("long") ? "0" : "\"0\"", type.equals("long") ? "1" : "\"1\"" };
|
||||||
|
String result = Strings.format("""
|
||||||
{
|
{
|
||||||
"class": {
|
"class": {
|
||||||
"doc_count_error_upper_bound": 0,
|
"doc_count_error_upper_bound": 0,
|
||||||
|
@ -191,7 +192,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
""", type.equals("long") ? "0" : "\"0\"", type.equals("long") ? "1" : "\"1\"");
|
""", args);
|
||||||
assertThat(Strings.toString(responseBuilder), equalTo(XContentHelper.stripWhitespace(result)));
|
assertThat(Strings.toString(responseBuilder), equalTo(XContentHelper.stripWhitespace(result)));
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -15,6 +15,7 @@ import org.elasticsearch.action.search.SearchResponse;
|
||||||
import org.elasticsearch.common.bytes.BytesArray;
|
import org.elasticsearch.common.bytes.BytesArray;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.plugins.Plugin;
|
import org.elasticsearch.plugins.Plugin;
|
||||||
import org.elasticsearch.script.MockScriptPlugin;
|
import org.elasticsearch.script.MockScriptPlugin;
|
||||||
import org.elasticsearch.script.Script;
|
import org.elasticsearch.script.Script;
|
||||||
|
@ -303,23 +304,29 @@ public class ScriptedMetricIT extends ESIntegTestCase {
|
||||||
// When using the MockScriptPlugin we can map Stored scripts to inline scripts:
|
// When using the MockScriptPlugin we can map Stored scripts to inline scripts:
|
||||||
// the id of the stored script is used in the test method while the source of the stored script
|
// the id of the stored script is used in the test method while the source of the stored script
|
||||||
// must match a predefined script from CustomScriptPlugin.pluginScripts() method
|
// must match a predefined script from CustomScriptPlugin.pluginScripts() method
|
||||||
assertAcked(client().admin().cluster().preparePutStoredScript().setId("initScript_stored").setContent(new BytesArray(formatted("""
|
assertAcked(
|
||||||
|
client().admin().cluster().preparePutStoredScript().setId("initScript_stored").setContent(new BytesArray(Strings.format("""
|
||||||
{"script": {"lang": "%s", "source": "vars.multiplier = 3"} }
|
{"script": {"lang": "%s", "source": "vars.multiplier = 3"} }
|
||||||
""", MockScriptPlugin.NAME)), XContentType.JSON));
|
""", MockScriptPlugin.NAME)), XContentType.JSON)
|
||||||
|
);
|
||||||
assertAcked(client().admin().cluster().preparePutStoredScript().setId("mapScript_stored").setContent(new BytesArray(formatted("""
|
|
||||||
{"script": {"lang": "%s", "source": "state.list.add(vars.multiplier)"} }
|
|
||||||
""", MockScriptPlugin.NAME)), XContentType.JSON));
|
|
||||||
|
|
||||||
assertAcked(
|
assertAcked(
|
||||||
client().admin().cluster().preparePutStoredScript().setId("combineScript_stored").setContent(new BytesArray(formatted("""
|
client().admin().cluster().preparePutStoredScript().setId("mapScript_stored").setContent(new BytesArray(Strings.format("""
|
||||||
|
{"script": {"lang": "%s", "source": "state.list.add(vars.multiplier)"} }
|
||||||
|
""", MockScriptPlugin.NAME)), XContentType.JSON)
|
||||||
|
);
|
||||||
|
|
||||||
|
assertAcked(
|
||||||
|
client().admin().cluster().preparePutStoredScript().setId("combineScript_stored").setContent(new BytesArray(Strings.format("""
|
||||||
{"script": {"lang": "%s", "source": "sum state values as a new aggregation"} }
|
{"script": {"lang": "%s", "source": "sum state values as a new aggregation"} }
|
||||||
""", MockScriptPlugin.NAME)), XContentType.JSON)
|
""", MockScriptPlugin.NAME)), XContentType.JSON)
|
||||||
);
|
);
|
||||||
|
|
||||||
assertAcked(client().admin().cluster().preparePutStoredScript().setId("reduceScript_stored").setContent(new BytesArray(formatted("""
|
assertAcked(
|
||||||
|
client().admin().cluster().preparePutStoredScript().setId("reduceScript_stored").setContent(new BytesArray(Strings.format("""
|
||||||
{"script": {"lang": "%s", "source": "sum all states (lists) values as a new aggregation"} }
|
{"script": {"lang": "%s", "source": "sum all states (lists) values as a new aggregation"} }
|
||||||
""", MockScriptPlugin.NAME)), XContentType.JSON));
|
""", MockScriptPlugin.NAME)), XContentType.JSON)
|
||||||
|
);
|
||||||
|
|
||||||
indexRandom(true, builders);
|
indexRandom(true, builders);
|
||||||
ensureSearchable();
|
ensureSearchable();
|
||||||
|
|
|
@ -20,6 +20,7 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction;
|
||||||
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
|
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
|
||||||
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.ScoreMode;
|
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.ScoreMode;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.query.QueryBuilder;
|
import org.elasticsearch.index.query.QueryBuilder;
|
||||||
import org.elasticsearch.index.query.QueryBuilders;
|
import org.elasticsearch.index.query.QueryBuilders;
|
||||||
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
||||||
|
@ -771,27 +772,27 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
|
||||||
ZonedDateTime docDate = dt.minusDays(1);
|
ZonedDateTime docDate = dt.minusDays(1);
|
||||||
String docDateString = docDate.getYear()
|
String docDateString = docDate.getYear()
|
||||||
+ "-"
|
+ "-"
|
||||||
+ formatted("%02d", docDate.getMonthValue())
|
+ Strings.format("%02d", docDate.getMonthValue())
|
||||||
+ "-"
|
+ "-"
|
||||||
+ formatted("%02d", docDate.getDayOfMonth());
|
+ Strings.format("%02d", docDate.getDayOfMonth());
|
||||||
client().index(
|
client().index(
|
||||||
indexRequest("test").id("1").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())
|
indexRequest("test").id("1").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())
|
||||||
).actionGet();
|
).actionGet();
|
||||||
docDate = dt.minusDays(2);
|
docDate = dt.minusDays(2);
|
||||||
docDateString = docDate.getYear()
|
docDateString = docDate.getYear()
|
||||||
+ "-"
|
+ "-"
|
||||||
+ formatted("%02d", docDate.getMonthValue())
|
+ Strings.format("%02d", docDate.getMonthValue())
|
||||||
+ "-"
|
+ "-"
|
||||||
+ formatted("%02d", docDate.getDayOfMonth());
|
+ Strings.format("%02d", docDate.getDayOfMonth());
|
||||||
client().index(
|
client().index(
|
||||||
indexRequest("test").id("2").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())
|
indexRequest("test").id("2").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())
|
||||||
).actionGet();
|
).actionGet();
|
||||||
docDate = dt.minusDays(3);
|
docDate = dt.minusDays(3);
|
||||||
docDateString = docDate.getYear()
|
docDateString = docDate.getYear()
|
||||||
+ "-"
|
+ "-"
|
||||||
+ formatted("%02d", docDate.getMonthValue())
|
+ Strings.format("%02d", docDate.getMonthValue())
|
||||||
+ "-"
|
+ "-"
|
||||||
+ formatted("%02d", docDate.getDayOfMonth());
|
+ Strings.format("%02d", docDate.getDayOfMonth());
|
||||||
client().index(
|
client().index(
|
||||||
indexRequest("test").id("3").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())
|
indexRequest("test").id("3").source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())
|
||||||
).actionGet();
|
).actionGet();
|
||||||
|
|
|
@ -20,6 +20,7 @@ import org.elasticsearch.action.search.SearchResponse;
|
||||||
import org.elasticsearch.action.search.ShardSearchFailure;
|
import org.elasticsearch.action.search.ShardSearchFailure;
|
||||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||||
import org.elasticsearch.index.query.QueryBuilders;
|
import org.elasticsearch.index.query.QueryBuilders;
|
||||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
|
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
|
||||||
|
@ -166,7 +167,7 @@ public class FieldSortIT extends ESIntegTestCase {
|
||||||
"foo",
|
"foo",
|
||||||
"bar",
|
"bar",
|
||||||
"timeUpdated",
|
"timeUpdated",
|
||||||
"2014/07/" + formatted("%02d", i + 1) + " " + formatted("%02d", j + 1) + ":00:00"
|
"2014/07/" + Strings.format("%02d", i + 1) + " " + Strings.format("%02d", j + 1) + ":00:00"
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -191,7 +192,7 @@ public class FieldSortIT extends ESIntegTestCase {
|
||||||
.setQuery(
|
.setQuery(
|
||||||
QueryBuilders.boolQuery()
|
QueryBuilders.boolQuery()
|
||||||
.must(QueryBuilders.termQuery("foo", "bar"))
|
.must(QueryBuilders.termQuery("foo", "bar"))
|
||||||
.must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/" + formatted("%02d", randomIntBetween(1, 7)) + "/01"))
|
.must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/" + Strings.format("%02d", randomIntBetween(1, 7)) + "/01"))
|
||||||
)
|
)
|
||||||
.addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date"))
|
.addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date"))
|
||||||
.setSize(scaledRandomIntBetween(1, docs))
|
.setSize(scaledRandomIntBetween(1, docs))
|
||||||
|
|
|
@ -23,6 +23,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||||
import org.elasticsearch.common.FieldMemoryStats;
|
import org.elasticsearch.common.FieldMemoryStats;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.unit.Fuzziness;
|
import org.elasticsearch.common.unit.Fuzziness;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||||
import org.elasticsearch.index.query.QueryBuilders;
|
import org.elasticsearch.index.query.QueryBuilders;
|
||||||
import org.elasticsearch.plugins.Plugin;
|
import org.elasticsearch.plugins.Plugin;
|
||||||
|
@ -1248,7 +1249,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
for (String expectedSuggestion : suggestions) {
|
for (String expectedSuggestion : suggestions) {
|
||||||
String errMsg = String.format(Locale.ROOT, "Expected elem %s to be in list %s", expectedSuggestion, suggestionList);
|
String errMsg = Strings.format("Expected elem %s to be in list %s", expectedSuggestion, suggestionList);
|
||||||
assertThat(errMsg, suggestionList, hasItem(expectedSuggestion));
|
assertThat(errMsg, suggestionList, hasItem(expectedSuggestion));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -22,6 +22,7 @@ import org.elasticsearch.common.bytes.BytesReference;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.unit.ByteSizeUnit;
|
import org.elasticsearch.common.unit.ByteSizeUnit;
|
||||||
import org.elasticsearch.core.IOUtils;
|
import org.elasticsearch.core.IOUtils;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.repositories.IndexId;
|
import org.elasticsearch.repositories.IndexId;
|
||||||
import org.elasticsearch.repositories.IndexMetaDataGenerations;
|
import org.elasticsearch.repositories.IndexMetaDataGenerations;
|
||||||
import org.elasticsearch.repositories.Repository;
|
import org.elasticsearch.repositories.Repository;
|
||||||
|
@ -39,7 +40,6 @@ import java.nio.file.Path;
|
||||||
import java.nio.file.StandardOpenOption;
|
import java.nio.file.StandardOpenOption;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.function.Function;
|
import java.util.function.Function;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
|
@ -289,7 +289,7 @@ public class CorruptedBlobStoreRepositoryIT extends AbstractSnapshotIntegTestCas
|
||||||
|
|
||||||
final SnapshotId snapshotToCorrupt = randomFrom(repositoryData.getSnapshotIds());
|
final SnapshotId snapshotToCorrupt = randomFrom(repositoryData.getSnapshotIds());
|
||||||
logger.info("--> delete root level snapshot metadata blob for snapshot [{}]", snapshotToCorrupt);
|
logger.info("--> delete root level snapshot metadata blob for snapshot [{}]", snapshotToCorrupt);
|
||||||
Files.delete(repo.resolve(String.format(Locale.ROOT, BlobStoreRepository.SNAPSHOT_NAME_FORMAT, snapshotToCorrupt.getUUID())));
|
Files.delete(repo.resolve(Strings.format(BlobStoreRepository.SNAPSHOT_NAME_FORMAT, snapshotToCorrupt.getUUID())));
|
||||||
|
|
||||||
logger.info("--> strip version information from index-N blob");
|
logger.info("--> strip version information from index-N blob");
|
||||||
final RepositoryData withoutVersions = new RepositoryData(
|
final RepositoryData withoutVersions = new RepositoryData(
|
||||||
|
|
|
@ -19,13 +19,13 @@ import org.elasticsearch.action.search.SearchResponse;
|
||||||
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
import org.elasticsearch.cluster.metadata.IndexMetadata;
|
||||||
import org.elasticsearch.common.lucene.uid.Versions;
|
import org.elasticsearch.common.lucene.uid.Versions;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.VersionType;
|
import org.elasticsearch.index.VersionType;
|
||||||
import org.elasticsearch.index.engine.VersionConflictEngineException;
|
import org.elasticsearch.index.engine.VersionConflictEngineException;
|
||||||
import org.elasticsearch.test.ESIntegTestCase;
|
import org.elasticsearch.test.ESIntegTestCase;
|
||||||
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Random;
|
import java.util.Random;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
@ -471,7 +471,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
|
||||||
// zero-pad sequential
|
// zero-pad sequential
|
||||||
logger.info("--> use zero-padded sequential ids");
|
logger.info("--> use zero-padded sequential ids");
|
||||||
yield new IDSource() {
|
yield new IDSource() {
|
||||||
final String zeroPad = String.format(Locale.ROOT, "%0" + TestUtil.nextInt(random, 4, 20) + "d", 0);
|
final String zeroPad = Strings.format("%0" + TestUtil.nextInt(random, 4, 20) + "d", 0);
|
||||||
int upto;
|
int upto;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
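The `SimpleVersioningIT` hunk above builds its padding width at runtime: `"%0" + TestUtil.nextInt(random, 4, 20) + "d"` produces a specifier such as `%012d`, meaning zero-pad the integer to twelve digits. The same `%0Nd` idiom appears throughout this diff in fixed-width names (`snap-%03d`, `index-%03d`, `-%06d`). For illustration (values picked here, not from the diff):

    Strings.format("%03d", 7);       // "007"
    Strings.format("snap-%03d", 7);  // "snap-007"
    Strings.format("%012d", 0);      // "000000000000"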
@ -917,4 +917,11 @@ public class Strings {
|
||||||
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
|
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
|
||||||
.toString();
|
.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Alias for {@link org.elasticsearch.core.Strings#format}
|
||||||
|
*/
|
||||||
|
public static String format(String format, Object... args) {
|
||||||
|
return org.elasticsearch.core.Strings.format(format, args);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
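The `common.Strings` hunk above is the alias the commit message mentions: a one-line delegate, presumably so callers that already import `org.elasticsearch.common.Strings` need no second import for `org.elasticsearch.core.Strings`. The point of funnelling everything through the core method is locale independence; a sketch of the variation that pinning `Locale.ROOT` avoids (locales chosen here for illustration):

    String.format(Locale.GERMANY, "%,d", 1_000_000); // "1.000.000"
    String.format(Locale.US, "%,d", 1_000_000);      // "1,000,000"
    // Strings.format pins Locale.ROOT internally, so output is stable everywhere:
    Strings.format("%,d", 1_000_000);                // "1,000,000"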
@@ -588,9 +588,10 @@ public class ElasticsearchExceptionTests extends ESTestCase {
 builder.endObject();
 actual = Strings.toString(builder);
 }
-assertThat(actual, startsWith(formatted("""
+Object[] args = new Object[] { Constants.WINDOWS ? "\\r\\n" : "\\n" };
+assertThat(actual, startsWith(Strings.format("""
 {"type":"exception","reason":"foo","caused_by":{"type":"illegal_state_exception","reason":"bar",\
-"stack_trace":"java.lang.IllegalStateException: bar%s\\tat org.elasticsearch.""", Constants.WINDOWS ? "\\r\\n" : "\\n")));
+"stack_trace":"java.lang.IllegalStateException: bar%s\\tat org.elasticsearch.""", args)));
 }
 }

@@ -93,10 +93,23 @@ public class ClusterAllocationExplainActionTests extends ESTestCase {
 } else {
 explanation = "the shard is in the process of initializing on node [], " + "wait until initialization has completed";
 }
-assertEquals(
-XContentHelper.stripWhitespace(
-formatted(
+Object[] args = new Object[] {
+shardRoutingState.toString().toLowerCase(Locale.ROOT),
+shard.unassignedInfo() != null
+? Strings.format(
 """
+,"unassigned_info": {"reason": "%s", "at": "%s", "last_allocation_status": "%s"}
+""",
+shard.unassignedInfo().getReason(),
+UnassignedInfo.DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(shard.unassignedInfo().getUnassignedTimeInMillis())),
+AllocationDecision.fromAllocationStatus(shard.unassignedInfo().getLastAllocationStatus())
+)
+: "",
+cae.getCurrentNode().getId(),
+cae.getCurrentNode().getName(),
+cae.getCurrentNode().getAddress(),
+explanation };
+assertEquals(XContentHelper.stripWhitespace(Strings.format("""
 {
 "index": "idx",
 "shard": 0,
@@ -109,28 +122,7 @@ public class ClusterAllocationExplainActionTests extends ESTestCase {
 "transport_address": "%s"
 },
 "explanation": "%s"
-}""",
-shardRoutingState.toString().toLowerCase(Locale.ROOT),
-shard.unassignedInfo() != null
-? formatted(
-"""
-,"unassigned_info": {"reason": "%s", "at": "%s", "last_allocation_status": "%s"}
-""",
-shard.unassignedInfo().getReason(),
-UnassignedInfo.DATE_TIME_FORMATTER.format(
-Instant.ofEpochMilli(shard.unassignedInfo().getUnassignedTimeInMillis())
-),
-AllocationDecision.fromAllocationStatus(shard.unassignedInfo().getLastAllocationStatus())
-)
-: "",
-cae.getCurrentNode().getId(),
-cae.getCurrentNode().getName(),
-cae.getCurrentNode().getAddress(),
-explanation
-)
-),
-Strings.toString(builder)
-);
+}""", args)), Strings.toString(builder));
 }

 public void testFindAnyUnassignedShardToExplain() {
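The restructure above shows a pattern applied throughout this commit: when a text block's argument list grows long, the arguments are hoisted into an `Object[]` declared first, so the format string stays next to the assertion. Because `Strings.format` takes varargs, a single `Object[]` argument is spread over the placeholders. A minimal sketch with illustrative values:

    import org.elasticsearch.core.Strings;

    public class ArgsHoistingSketch {
        public static void main(String[] unused) {
            Object[] args = new Object[] { "idx", 0, "node-1" }; // illustrative values
            // The lone Object[] is treated as the whole varargs array.
            String json = Strings.format("""
                {"index": "%s", "shard": %d, "node": "%s"}""", args);
            System.out.println(json);
        }
    }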
@@ -84,7 +84,7 @@ public final class ClusterAllocationExplanationTests extends ESTestCase {
 ClusterAllocationExplanation cae = randomClusterAllocationExplanation(true, true);
 XContentBuilder builder = XContentFactory.jsonBuilder();
 cae.toXContent(builder, ToXContent.EMPTY_PARAMS);
-assertEquals(XContentHelper.stripWhitespace(formatted("""
+assertEquals(XContentHelper.stripWhitespace(Strings.format("""
 {
 "index": "idx",
 "shard": 0,
@@ -112,7 +112,7 @@ public final class ClusterAllocationExplanationTests extends ESTestCase {
 actual,
 equalTo(
 XContentHelper.stripWhitespace(
-formatted(
+Strings.format(
 """
 {
 "note": "%s",

@@ -60,7 +60,7 @@ public class ClusterRerouteResponseTests extends ESTestCase {

 var result = toXContent(clusterRerouteResponse, new ToXContent.MapParams(Map.of("explain", "true", "metric", "none")));

-assertThat(result, equalTo(XContentHelper.stripWhitespace(formatted("""
+assertThat(result, equalTo(XContentHelper.stripWhitespace(Strings.format("""
 {
 "acknowledged": true,
 "explanations": [
@@ -90,7 +90,7 @@ public class ClusterRerouteResponseTests extends ESTestCase {

 var result = toXContent(clusterRerouteResponse, ToXContent.EMPTY_PARAMS);

-assertThat(result, equalTo(XContentHelper.stripWhitespace(formatted("""
+assertThat(result, equalTo(XContentHelper.stripWhitespace(Strings.format("""
 {
 "acknowledged": true,
 "state": {

@@ -11,6 +11,7 @@ package org.elasticsearch.action.admin.cluster.snapshots.status;
 import org.elasticsearch.cluster.SnapshotsInProgress;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.snapshots.Snapshot;
 import org.elasticsearch.snapshots.SnapshotId;
@@ -55,7 +56,7 @@ public class SnapshotStatusTests extends AbstractChunkedSerializingTestCase<Snap
 case FAILURE -> failedShards++;
 }

-String expected = formatted(
+String expected = Strings.format(
 """
 {
 "snapshot" : "test-snap",

@@ -14,6 +14,7 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStatsTests;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.index.stats.IndexingPressureStats;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xcontent.XContentType;
@@ -92,7 +93,7 @@ public class ClusterStatsNodesTests extends ESTestCase {
 long failedCount = statValues[1];
 long current = statValues[2];
 long timeInMillis = statValues[3];
-processorStatsString.append(formatted("""
+processorStatsString.append(Strings.format("""
 "%s":{"count":%s,"failed":%s,"current":%s,"time_in_millis":%s}\
 """, entry.getKey(), count, failedCount, current, timeInMillis));
 if (iter.hasNext()) {
@@ -100,7 +101,7 @@ public class ClusterStatsNodesTests extends ESTestCase {
 }
 }
 processorStatsString.append("}");
-assertThat(toXContent(stats, XContentType.JSON, false).utf8ToString(), equalTo(formatted("""
+assertThat(toXContent(stats, XContentType.JSON, false).utf8ToString(), equalTo(Strings.format("""
 {"ingest":{"number_of_pipelines":%s,"processor_stats":%s}}\
 """, stats.pipelineCount, processorStatsString)));
 }

@@ -28,7 +28,7 @@ public class IndexFeatureStatsTests extends AbstractWireSerializingTestCase<Inde

 public void testToXContent() {
 IndexFeatureStats testInstance = createTestInstance();
-assertEquals(formatted("""
+assertEquals(Strings.format("""
 {"name":"%s","count":%s,"index_count":%s}\
 """, testInstance.name, testInstance.count, testInstance.indexCount), Strings.toString(testInstance));
 }

@@ -96,7 +96,7 @@ public class MappingStatsTests extends AbstractWireSerializingTestCase<MappingSt
 private static final String SCRIPT_4 = scriptAsJSON("params._source.field");

 public void testToXContent() {
-String mapping = formatted(MAPPING_TEMPLATE, SCRIPT_1, SCRIPT_2, SCRIPT_3, SCRIPT_4, SCRIPT_3, SCRIPT_4, SCRIPT_1);
+String mapping = Strings.format(MAPPING_TEMPLATE, SCRIPT_1, SCRIPT_2, SCRIPT_3, SCRIPT_4, SCRIPT_3, SCRIPT_4, SCRIPT_1);
 IndexMetadata meta = IndexMetadata.builder("index").settings(SINGLE_SHARD_NO_REPLICAS).putMapping(mapping).build();
 IndexMetadata meta2 = IndexMetadata.builder("index2").settings(SINGLE_SHARD_NO_REPLICAS).putMapping(mapping).build();
 Metadata metadata = Metadata.builder().put(meta, false).put(meta2, false).build();
@@ -204,10 +204,19 @@ public class MappingStatsTests extends AbstractWireSerializingTestCase<MappingSt
 public void testToXContentWithSomeSharedMappings() {
 IndexMetadata meta = IndexMetadata.builder("index")
 .settings(SINGLE_SHARD_NO_REPLICAS)
-.putMapping(formatted(MAPPING_TEMPLATE, SCRIPT_1, SCRIPT_2, SCRIPT_3, SCRIPT_4, SCRIPT_3, SCRIPT_4, SCRIPT_1))
+.putMapping(Strings.format(MAPPING_TEMPLATE, SCRIPT_1, SCRIPT_2, SCRIPT_3, SCRIPT_4, SCRIPT_3, SCRIPT_4, SCRIPT_1))
 .build();
 // make mappings that are slightly different because we shuffled 2 scripts between fields
-final String mappingString2 = formatted(MAPPING_TEMPLATE, SCRIPT_1, SCRIPT_2, SCRIPT_3, SCRIPT_4, SCRIPT_4, SCRIPT_3, SCRIPT_1);
+final String mappingString2 = Strings.format(
+MAPPING_TEMPLATE,
+SCRIPT_1,
+SCRIPT_2,
+SCRIPT_3,
+SCRIPT_4,
+SCRIPT_4,
+SCRIPT_3,
+SCRIPT_1
+);
 IndexMetadata meta2 = IndexMetadata.builder("index2").settings(SINGLE_SHARD_NO_REPLICAS).putMapping(mappingString2).build();
 IndexMetadata meta3 = IndexMetadata.builder("index3").settings(SINGLE_SHARD_NO_REPLICAS).putMapping(mappingString2).build();
 Metadata metadata = Metadata.builder().put(meta, false).put(meta2, false).put(meta3, false).build();

@@ -10,6 +10,7 @@ package org.elasticsearch.action.admin.indices.create;

 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.client.NoOpClient;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -51,11 +52,11 @@ public class CreateIndexRequestBuilderTests extends ESTestCase {

 ElasticsearchParseException e = expectThrows(
 ElasticsearchParseException.class,
-() -> builder.setSource(formatted("{ \"%s\": \"%s\" }", KEY, VALUE), XContentType.JSON)
+() -> { builder.setSource(Strings.format("{ \"%s\": \"%s\" }", KEY, VALUE), XContentType.JSON); }
 );
-assertEquals(formatted("unknown key [%s] for create index", KEY), e.getMessage());
+assertEquals(Strings.format("unknown key [%s] for create index", KEY), e.getMessage());

-builder.setSource(formatted("{ \"settings\": { \"%s\": \"%s\" }}", KEY, VALUE), XContentType.JSON);
+builder.setSource(Strings.format("{ \"settings\": { \"%s\": \"%s\" }}", KEY, VALUE), XContentType.JSON);
 assertEquals(VALUE, builder.request().settings().get(KEY));

 XContentBuilder xContent = XContentFactory.jsonBuilder()

@@ -8,6 +8,7 @@

 package org.elasticsearch.action.admin.indices.get;

+import org.elasticsearch.core.Strings;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.RestRequestTests;
 import org.elasticsearch.test.ESTestCase;
@@ -68,6 +69,6 @@ public class GetIndexRequestTests extends ESTestCase {

 RestRequest request = RestRequestTests.contentRestRequest("", Map.of("features", String.join(",", invalidFeatures)));
 IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> GetIndexRequest.Feature.fromRequest(request));
-assertThat(e.getMessage(), containsString(formatted("Invalid features specified [%s]", String.join(",", invalidFeatures))));
+assertThat(e.getMessage(), containsString(Strings.format("Invalid features specified [%s]", String.join(",", invalidFeatures))));
 }
 }

@@ -276,7 +276,7 @@ public class MetadataRolloverServiceTests extends ESTestCase {
 String indexEndingInNumbers = indexPrefix + "-" + num;
 assertThat(
 MetadataRolloverService.generateRolloverIndexName(indexEndingInNumbers),
-equalTo(indexPrefix + "-" + formatted("%06d", num + 1))
+equalTo(indexPrefix + "-" + Strings.format("%06d", num + 1))
 );
 assertThat(MetadataRolloverService.generateRolloverIndexName("index-name-1"), equalTo("index-name-000002"));
 assertThat(MetadataRolloverService.generateRolloverIndexName("index-name-2"), equalTo("index-name-000003"));
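The rollover assertions above depend on zero padding: `%06d` widens the generation counter to six digits, which is why `index-name-1` rolls over to `index-name-000002`. A quick sketch (the prefix is illustrative):

    import org.elasticsearch.core.Strings;

    public class RolloverPaddingSketch {
        public static void main(String[] args) {
            int num = 1; // illustrative current generation
            String next = "index-name" + "-" + Strings.format("%06d", num + 1);
            System.out.println(next); // index-name-000002
        }
    }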
@ -29,6 +29,7 @@ import org.elasticsearch.cluster.metadata.ReservedStateMetadata;
|
||||||
import org.elasticsearch.cluster.service.ClusterService;
|
import org.elasticsearch.cluster.service.ClusterService;
|
||||||
import org.elasticsearch.common.settings.IndexScopedSettings;
|
import org.elasticsearch.common.settings.IndexScopedSettings;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.IndexService;
|
import org.elasticsearch.index.IndexService;
|
||||||
import org.elasticsearch.index.IndexSettingProviders;
|
import org.elasticsearch.index.IndexSettingProviders;
|
||||||
import org.elasticsearch.index.mapper.MapperService;
|
import org.elasticsearch.index.mapper.MapperService;
|
||||||
|
@ -49,7 +50,6 @@ import org.junit.Before;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
|
||||||
|
@ -804,7 +804,7 @@ public class ReservedComposableIndexTemplateActionTests extends ESTestCase {
|
||||||
|
|
||||||
try (
|
try (
|
||||||
XContentParser parser = XContentType.JSON.xContent()
|
XContentParser parser = XContentType.JSON.xContent()
|
||||||
.createParser(XContentParserConfiguration.EMPTY, String.format(Locale.ROOT, composableTemplate, "template_1"))
|
.createParser(XContentParserConfiguration.EMPTY, Strings.format(composableTemplate, "template_1"))
|
||||||
) {
|
) {
|
||||||
var request = action.fromXContent(parser).composableTemplates().get(0);
|
var request = action.fromXContent(parser).composableTemplates().get(0);
|
||||||
assertTrue(
|
assertTrue(
|
||||||
|
@ -817,7 +817,7 @@ public class ReservedComposableIndexTemplateActionTests extends ESTestCase {
|
||||||
|
|
||||||
try (
|
try (
|
||||||
XContentParser parser = XContentType.JSON.xContent()
|
XContentParser parser = XContentType.JSON.xContent()
|
||||||
.createParser(XContentParserConfiguration.EMPTY, String.format(Locale.ROOT, composableTemplate, "template_2"))
|
.createParser(XContentParserConfiguration.EMPTY, Strings.format(composableTemplate, "template_2"))
|
||||||
) {
|
) {
|
||||||
var request = action.fromXContent(parser).composableTemplates().get(0);
|
var request = action.fromXContent(parser).composableTemplates().get(0);
|
||||||
// this should just work, no failure
|
// this should just work, no failure
|
||||||
|
@ -829,7 +829,7 @@ public class ReservedComposableIndexTemplateActionTests extends ESTestCase {
|
||||||
final String conflictingTemplateName = "validate_template";
|
final String conflictingTemplateName = "validate_template";
|
||||||
|
|
||||||
// Reserve the validate_template name in the reserved metadata
|
// Reserve the validate_template name in the reserved metadata
|
||||||
String composableTemplate = String.format(Locale.ROOT, """
|
String composableTemplate = Strings.format("""
|
||||||
{
|
{
|
||||||
"composable_index_templates": {
|
"composable_index_templates": {
|
||||||
"%s": {
|
"%s": {
|
||||||
|
|
|
@ -22,6 +22,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
|
||||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||||
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
|
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
|
||||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.core.TimeValue;
|
import org.elasticsearch.core.TimeValue;
|
||||||
import org.elasticsearch.index.shard.ShardId;
|
import org.elasticsearch.index.shard.ShardId;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
@ -289,7 +290,7 @@ public class BulkProcessorTests extends ESTestCase {
|
||||||
Concurrent Bulk Requests: %s
|
Concurrent Bulk Requests: %s
|
||||||
""";
|
""";
|
||||||
fail(
|
fail(
|
||||||
formatted(
|
Strings.format(
|
||||||
message,
|
message,
|
||||||
expectedExecutions,
|
expectedExecutions,
|
||||||
requestCount.get(),
|
requestCount.get(),
|
||||||
|
@ -420,7 +421,7 @@ public class BulkProcessorTests extends ESTestCase {
|
||||||
Concurrent Bulk Requests: %d
|
Concurrent Bulk Requests: %d
|
||||||
""";
|
""";
|
||||||
fail(
|
fail(
|
||||||
formatted(
|
Strings.format(
|
||||||
message,
|
message,
|
||||||
requestCount.get(),
|
requestCount.get(),
|
||||||
successCount.get(),
|
successCount.get(),
|
||||||
|
|
|
@ -11,6 +11,7 @@ package org.elasticsearch.action.fieldcaps;
|
||||||
import org.apache.lucene.util.ArrayUtil;
|
import org.apache.lucene.util.ArrayUtil;
|
||||||
import org.elasticsearch.common.io.stream.Writeable;
|
import org.elasticsearch.common.io.stream.Writeable;
|
||||||
import org.elasticsearch.common.util.iterable.Iterables;
|
import org.elasticsearch.common.util.iterable.Iterables;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.mapper.TimeSeriesParams;
|
import org.elasticsearch.index.mapper.TimeSeriesParams;
|
||||||
import org.elasticsearch.test.AbstractXContentSerializingTestCase;
|
import org.elasticsearch.test.AbstractXContentSerializingTestCase;
|
||||||
import org.elasticsearch.xcontent.XContentParser;
|
import org.elasticsearch.xcontent.XContentParser;
|
||||||
|
@ -21,7 +22,6 @@ import java.util.Arrays;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.stream.IntStream;
|
import java.util.stream.IntStream;
|
||||||
|
@ -167,9 +167,7 @@ public class FieldCapabilitiesTests extends AbstractXContentSerializingTestCase<
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testRandomBuilder() {
|
public void testRandomBuilder() {
|
||||||
String[] indices = IntStream.range(0, randomIntBetween(1, 50))
|
String[] indices = IntStream.range(0, randomIntBetween(1, 50)).mapToObj(n -> Strings.format("index_%2d", n)).toArray(String[]::new);
|
||||||
.mapToObj(n -> String.format(Locale.ROOT, "index_%2d", n))
|
|
||||||
.toArray(String[]::new);
|
|
||||||
|
|
||||||
List<String> nonSearchableIndices = new ArrayList<>();
|
List<String> nonSearchableIndices = new ArrayList<>();
|
||||||
List<String> nonAggregatableIndices = new ArrayList<>();
|
List<String> nonAggregatableIndices = new ArrayList<>();
|
||||||
|
@ -238,7 +236,7 @@ public class FieldCapabilitiesTests extends AbstractXContentSerializingTestCase<
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testBuilderSingleMetricType() {
|
public void testBuilderSingleMetricType() {
|
||||||
List<String> indices = IntStream.range(0, randomIntBetween(1, 50)).mapToObj(n -> formatted("index_%2d", n)).toList();
|
List<String> indices = IntStream.range(0, randomIntBetween(1, 50)).mapToObj(n -> Strings.format("index_%2d", n)).toList();
|
||||||
TimeSeriesParams.MetricType metric = randomBoolean() ? null : randomFrom(TimeSeriesParams.MetricType.values());
|
TimeSeriesParams.MetricType metric = randomBoolean() ? null : randomFrom(TimeSeriesParams.MetricType.values());
|
||||||
FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type");
|
FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type");
|
||||||
for (String index : indices) {
|
for (String index : indices) {
|
||||||
|
@ -250,7 +248,7 @@ public class FieldCapabilitiesTests extends AbstractXContentSerializingTestCase<
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testBuilderMixedMetricType() {
|
public void testBuilderMixedMetricType() {
|
||||||
List<String> indices = IntStream.range(0, randomIntBetween(1, 50)).mapToObj(n -> formatted("index_%2d", n)).toList();
|
List<String> indices = IntStream.range(0, randomIntBetween(1, 50)).mapToObj(n -> Strings.format("index_%2d", n)).toList();
|
||||||
Map<String, TimeSeriesParams.MetricType> metricTypes = new HashMap<>();
|
Map<String, TimeSeriesParams.MetricType> metricTypes = new HashMap<>();
|
||||||
for (String index : indices) {
|
for (String index : indices) {
|
||||||
if (randomBoolean()) {
|
if (randomBoolean()) {
|
||||||
|
|
|
@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||||
import org.elasticsearch.common.time.DateFormatter;
|
import org.elasticsearch.common.time.DateFormatter;
|
||||||
import org.elasticsearch.common.time.FormatNames;
|
import org.elasticsearch.common.time.FormatNames;
|
||||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.core.Tuple;
|
import org.elasticsearch.core.Tuple;
|
||||||
import org.elasticsearch.index.VersionType;
|
import org.elasticsearch.index.VersionType;
|
||||||
import org.elasticsearch.index.seqno.SequenceNumbers;
|
import org.elasticsearch.index.seqno.SequenceNumbers;
|
||||||
|
@ -145,12 +146,7 @@ public class IndexRequestTests extends ESTestCase {
|
||||||
assertEquals(total, indexResponse.getShardInfo().getTotal());
|
assertEquals(total, indexResponse.getShardInfo().getTotal());
|
||||||
assertEquals(successful, indexResponse.getShardInfo().getSuccessful());
|
assertEquals(successful, indexResponse.getShardInfo().getSuccessful());
|
||||||
assertEquals(forcedRefresh, indexResponse.forcedRefresh());
|
assertEquals(forcedRefresh, indexResponse.forcedRefresh());
|
||||||
assertEquals(
|
Object[] args = new Object[] {
|
||||||
formatted(
|
|
||||||
"""
|
|
||||||
IndexResponse[index=%s,id=%s,version=%s,result=%s,seqNo=%s,primaryTerm=%s,shards=\
|
|
||||||
{"total":%s,"successful":%s,"failed":0}]\
|
|
||||||
""",
|
|
||||||
shardId.getIndexName(),
|
shardId.getIndexName(),
|
||||||
id,
|
id,
|
||||||
version,
|
version,
|
||||||
|
@ -158,10 +154,11 @@ public class IndexRequestTests extends ESTestCase {
|
||||||
SequenceNumbers.UNASSIGNED_SEQ_NO,
|
SequenceNumbers.UNASSIGNED_SEQ_NO,
|
||||||
0,
|
0,
|
||||||
total,
|
total,
|
||||||
successful
|
successful };
|
||||||
),
|
assertEquals(Strings.format("""
|
||||||
indexResponse.toString()
|
IndexResponse[index=%s,id=%s,version=%s,result=%s,seqNo=%s,primaryTerm=%s,shards=\
|
||||||
);
|
{"total":%s,"successful":%s,"failed":0}]\
|
||||||
|
""", args), indexResponse.toString());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testIndexRequestXContentSerialization() throws IOException {
|
public void testIndexRequestXContentSerialization() throws IOException {
|
||||||
|
@ -257,7 +254,7 @@ public class IndexRequestTests extends ESTestCase {
|
||||||
request.source(source, XContentType.JSON);
|
request.source(source, XContentType.JSON);
|
||||||
assertEquals("index {[index][null], source[" + source + "]}", request.toString());
|
assertEquals("index {[index][null], source[" + source + "]}", request.toString());
|
||||||
|
|
||||||
source = formatted("""
|
source = Strings.format("""
|
||||||
{"name":"%s"}
|
{"name":"%s"}
|
||||||
""", randomUnicodeOfLength(IndexRequest.MAX_SOURCE_LENGTH_IN_TOSTRING));
|
""", randomUnicodeOfLength(IndexRequest.MAX_SOURCE_LENGTH_IN_TOSTRING));
|
||||||
request.source(source, XContentType.JSON);
|
request.source(source, XContentType.JSON);
|
||||||
|
|
|
@ -57,7 +57,7 @@ public class MainResponseTests extends AbstractXContentSerializingTestCase<MainR
|
||||||
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||||
assertEquals(
|
assertEquals(
|
||||||
XContentHelper.stripWhitespace(
|
XContentHelper.stripWhitespace(
|
||||||
formatted(
|
Strings.format(
|
||||||
"""
|
"""
|
||||||
{
|
{
|
||||||
"name": "nodeName",
|
"name": "nodeName",
|
||||||
|
|
|
@ -195,7 +195,7 @@ public class MultiSearchRequestTests extends ESTestCase {
|
||||||
tookInMillis
|
tookInMillis
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(XContentHelper.stripWhitespace(formatted("""
|
assertEquals(XContentHelper.stripWhitespace(Strings.format("""
|
||||||
{
|
{
|
||||||
"took": %s,
|
"took": %s,
|
||||||
"responses": [
|
"responses": [
|
||||||
|
|
|
@ -36,7 +36,7 @@ public class ReplicationResponseTests extends ESTestCase {
|
||||||
final int total = 5;
|
final int total = 5;
|
||||||
final int successful = randomIntBetween(1, total);
|
final int successful = randomIntBetween(1, total);
|
||||||
final ShardInfo shardInfo = new ShardInfo(total, successful);
|
final ShardInfo shardInfo = new ShardInfo(total, successful);
|
||||||
assertEquals(formatted("ShardInfo{total=5, successful=%d, failures=[]}", successful), shardInfo.toString());
|
assertEquals(Strings.format("ShardInfo{total=5, successful=%d, failures=[]}", successful), shardInfo.toString());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testShardInfoToXContent() throws IOException {
|
public void testShardInfoToXContent() throws IOException {
|
||||||
|
|
|
@ -47,6 +47,7 @@ import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.util.PageCacheRecycler;
|
import org.elasticsearch.common.util.PageCacheRecycler;
|
||||||
import org.elasticsearch.core.Nullable;
|
import org.elasticsearch.core.Nullable;
|
||||||
import org.elasticsearch.core.Releasable;
|
import org.elasticsearch.core.Releasable;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.IndexNotFoundException;
|
import org.elasticsearch.index.IndexNotFoundException;
|
||||||
import org.elasticsearch.index.IndexService;
|
import org.elasticsearch.index.IndexService;
|
||||||
|
@ -88,7 +89,6 @@ import java.util.Collections;
|
||||||
import java.util.EnumSet;
|
import java.util.EnumSet;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.concurrent.ExecutionException;
|
import java.util.concurrent.ExecutionException;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
|
@ -1582,7 +1582,7 @@ public class TransportReplicationActionTests extends ESTestCase {
|
||||||
final long primaryTerm = indexShard.getPendingPrimaryTerm();
|
final long primaryTerm = indexShard.getPendingPrimaryTerm();
|
||||||
if (term < primaryTerm) {
|
if (term < primaryTerm) {
|
||||||
throw new IllegalArgumentException(
|
throw new IllegalArgumentException(
|
||||||
String.format(Locale.ROOT, "%s operation term [%d] is too old (current [%d])", shardId, term, primaryTerm)
|
Strings.format("%s operation term [%d] is too old (current [%d])", shardId, term, primaryTerm)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
count.incrementAndGet();
|
count.incrementAndGet();
|
||||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService;
|
||||||
import org.elasticsearch.common.io.stream.StreamInput;
|
import org.elasticsearch.common.io.stream.StreamInput;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.core.Releasable;
|
import org.elasticsearch.core.Releasable;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.IndexService;
|
import org.elasticsearch.index.IndexService;
|
||||||
import org.elasticsearch.index.IndexingPressure;
|
import org.elasticsearch.index.IndexingPressure;
|
||||||
|
@ -57,7 +58,6 @@ import org.mockito.ArgumentCaptor;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.concurrent.ExecutionException;
|
import java.util.concurrent.ExecutionException;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
import java.util.concurrent.atomic.AtomicBoolean;
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
|
@ -521,7 +521,7 @@ public class TransportWriteActionTests extends ESTestCase {
|
||||||
final long primaryTerm = indexShard.getPendingPrimaryTerm();
|
final long primaryTerm = indexShard.getPendingPrimaryTerm();
|
||||||
if (term < primaryTerm) {
|
if (term < primaryTerm) {
|
||||||
throw new IllegalArgumentException(
|
throw new IllegalArgumentException(
|
||||||
String.format(Locale.ROOT, "%s operation term [%d] is too old (current [%d])", shardId, term, primaryTerm)
|
Strings.format("%s operation term [%d] is too old (current [%d])", shardId, term, primaryTerm)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
count.incrementAndGet();
|
count.incrementAndGet();
|
||||||
|
|
|
@ -24,6 +24,7 @@ import org.apache.lucene.tests.analysis.MockTokenizer;
|
||||||
import org.apache.lucene.util.BytesRef;
|
import org.apache.lucene.util.BytesRef;
|
||||||
import org.elasticsearch.ElasticsearchException;
|
import org.elasticsearch.ElasticsearchException;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.analysis.PreConfiguredTokenizer;
|
import org.elasticsearch.index.analysis.PreConfiguredTokenizer;
|
||||||
import org.elasticsearch.index.analysis.TokenFilterFactory;
|
import org.elasticsearch.index.analysis.TokenFilterFactory;
|
||||||
import org.elasticsearch.indices.analysis.AnalysisModule;
|
import org.elasticsearch.indices.analysis.AnalysisModule;
|
||||||
|
@ -191,7 +192,7 @@ public class GetTermVectorsTests extends ESSingleNodeTestCase {
|
||||||
for (int k = 0; k < docsAndPositions.freq(); k++) {
|
for (int k = 0; k < docsAndPositions.freq(); k++) {
|
||||||
docsAndPositions.nextPosition();
|
docsAndPositions.nextPosition();
|
||||||
if (docsAndPositions.getPayload() != null) {
|
if (docsAndPositions.getPayload() != null) {
|
||||||
String infoString = formatted("""
|
String infoString = Strings.format("""
|
||||||
|
|
||||||
term: %s has payload\s
|
term: %s has payload\s
|
||||||
%s
|
%s
|
||||||
|
|
|
@ -146,7 +146,7 @@ public class ClusterStateTests extends ESTestCase {
|
||||||
);
|
);
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(XContentHelper.stripWhitespace(formatted("""
|
assertEquals(XContentHelper.stripWhitespace(Strings.format("""
|
||||||
{
|
{
|
||||||
"cluster_uuid": "clusterUUID",
|
"cluster_uuid": "clusterUUID",
|
||||||
"version": 0,
|
"version": 0,
|
||||||
|
@ -377,7 +377,7 @@ public class ClusterStateTests extends ESTestCase {
|
||||||
writeChunks(clusterState, builder, new ToXContent.MapParams(mapParams), 34);
|
writeChunks(clusterState, builder, new ToXContent.MapParams(mapParams), 34);
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"cluster_uuid" : "clusterUUID",
|
"cluster_uuid" : "clusterUUID",
|
||||||
"version" : 0,
|
"version" : 0,
|
||||||
|
@ -605,7 +605,7 @@ public class ClusterStateTests extends ESTestCase {
|
||||||
writeChunks(clusterState, builder, new ToXContent.MapParams(mapParams), 34);
|
writeChunks(clusterState, builder, new ToXContent.MapParams(mapParams), 34);
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"cluster_uuid" : "clusterUUID",
|
"cluster_uuid" : "clusterUUID",
|
||||||
"version" : 0,
|
"version" : 0,
|
||||||
|
@ -857,7 +857,7 @@ public class ClusterStateTests extends ESTestCase {
|
||||||
writeChunks(clusterState, builder, ToXContent.EMPTY_PARAMS, 25);
|
writeChunks(clusterState, builder, ToXContent.EMPTY_PARAMS, 25);
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"cluster_uuid" : "clusterUUID",
|
"cluster_uuid" : "clusterUUID",
|
||||||
"version" : 0,
|
"version" : 0,
|
||||||
|
|
|
@ -21,6 +21,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
|
||||||
import org.elasticsearch.cluster.routing.RoutingNodesHelper;
|
import org.elasticsearch.cluster.routing.RoutingNodesHelper;
|
||||||
import org.elasticsearch.cluster.routing.ShardRoutingState;
|
import org.elasticsearch.cluster.routing.ShardRoutingState;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.indices.cluster.ClusterStateChanges;
|
import org.elasticsearch.indices.cluster.ClusterStateChanges;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.elasticsearch.test.VersionUtils;
|
import org.elasticsearch.test.VersionUtils;
|
||||||
|
@ -100,7 +101,7 @@ public class AutoExpandReplicasTests extends ESTestCase {
|
||||||
protected DiscoveryNode createNode(Version version, DiscoveryNodeRole... mustHaveRoles) {
|
protected DiscoveryNode createNode(Version version, DiscoveryNodeRole... mustHaveRoles) {
|
||||||
Set<DiscoveryNodeRole> roles = new HashSet<>(randomSubsetOf(DiscoveryNodeRole.roles()));
|
Set<DiscoveryNodeRole> roles = new HashSet<>(randomSubsetOf(DiscoveryNodeRole.roles()));
|
||||||
Collections.addAll(roles, mustHaveRoles);
|
Collections.addAll(roles, mustHaveRoles);
|
||||||
final String id = formatted("node_%03d", nodeIdGenerator.incrementAndGet());
|
final String id = Strings.format("node_%03d", nodeIdGenerator.incrementAndGet());
|
||||||
return new DiscoveryNode(id, id, buildNewFakeTransportAddress(), Collections.emptyMap(), roles, version);
|
return new DiscoveryNode(id, id, buildNewFakeTransportAddress(), Collections.emptyMap(), roles, version);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -206,10 +206,10 @@ public class ComponentTemplateTests extends SimpleDiffableSerializationTestCase<
|
||||||
|
|
||||||
{
|
{
|
||||||
String randomString = randomAlphaOfLength(10);
|
String randomString = randomAlphaOfLength(10);
|
||||||
CompressedXContent m1 = new CompressedXContent(formatted("""
|
CompressedXContent m1 = new CompressedXContent(Strings.format("""
|
||||||
{"properties":{"%s":{"type":"keyword"}}}
|
{"properties":{"%s":{"type":"keyword"}}}
|
||||||
""", randomString));
|
""", randomString));
|
||||||
CompressedXContent m2 = new CompressedXContent(formatted("""
|
CompressedXContent m2 = new CompressedXContent(Strings.format("""
|
||||||
{"properties":{"%s":{"type":"keyword"}}}
|
{"properties":{"%s":{"type":"keyword"}}}
|
||||||
""", randomString));
|
""", randomString));
|
||||||
assertThat(Template.mappingsEquals(m1, m2), equalTo(true));
|
assertThat(Template.mappingsEquals(m1, m2), equalTo(true));
|
||||||
|
@ -217,14 +217,14 @@ public class ComponentTemplateTests extends SimpleDiffableSerializationTestCase<
|
||||||
|
|
||||||
{
|
{
|
||||||
CompressedXContent m1 = randomMappings();
|
CompressedXContent m1 = randomMappings();
|
||||||
CompressedXContent m2 = new CompressedXContent(formatted("""
|
CompressedXContent m2 = new CompressedXContent(Strings.format("""
|
||||||
{"properties":{"%s":{"type":"keyword"}}}
|
{"properties":{"%s":{"type":"keyword"}}}
|
||||||
""", randomAlphaOfLength(10)));
|
""", randomAlphaOfLength(10)));
|
||||||
assertThat(Template.mappingsEquals(m1, m2), equalTo(false));
|
assertThat(Template.mappingsEquals(m1, m2), equalTo(false));
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
Map<String, Object> map = XContentHelper.convertToMap(new BytesArray(formatted("""
|
Map<String, Object> map = XContentHelper.convertToMap(new BytesArray(Strings.format("""
|
||||||
{"%s":{"properties":{"%s":{"type":"keyword"}}}}
|
{"%s":{"properties":{"%s":{"type":"keyword"}}}}
|
||||||
""", MapperService.SINGLE_MAPPING_NAME, randomAlphaOfLength(10))), true, XContentType.JSON).v2();
|
""", MapperService.SINGLE_MAPPING_NAME, randomAlphaOfLength(10))), true, XContentType.JSON).v2();
|
||||||
Map<String, Object> reduceMap = Template.reduceMapping(map);
|
Map<String, Object> reduceMap = Template.reduceMapping(map);
|
||||||
|
|
|
@ -183,7 +183,7 @@ public class DataStreamTests extends AbstractXContentSerializingTestCase<DataStr
|
||||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> original.removeBackingIndex(indexToRemove));
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> original.removeBackingIndex(indexToRemove));
|
||||||
assertThat(
|
assertThat(
|
||||||
e.getMessage(),
|
e.getMessage(),
|
||||||
equalTo(String.format(Locale.ROOT, "index [%s] is not part of data stream [%s]", indexToRemove.getName(), dataStreamName))
|
equalTo(Strings.format("index [%s] is not part of data stream [%s]", indexToRemove.getName(), dataStreamName))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -398,7 +398,7 @@ public class DataStreamTests extends AbstractXContentSerializingTestCase<DataStr
|
||||||
long epochMillis = randomLongBetween(1580536800000L, 1583042400000L);
|
long epochMillis = randomLongBetween(1580536800000L, 1583042400000L);
|
||||||
String dateString = DataStream.DATE_FORMATTER.formatMillis(epochMillis);
|
String dateString = DataStream.DATE_FORMATTER.formatMillis(epochMillis);
|
||||||
String defaultBackingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, backingIndexNum, epochMillis);
|
String defaultBackingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, backingIndexNum, epochMillis);
|
||||||
String expectedBackingIndexName = String.format(Locale.ROOT, ".ds-%s-%s-%06d", dataStreamName, dateString, backingIndexNum);
|
String expectedBackingIndexName = Strings.format(".ds-%s-%s-%06d", dataStreamName, dateString, backingIndexNum);
|
||||||
assertThat(defaultBackingIndexName, equalTo(expectedBackingIndexName));
|
assertThat(defaultBackingIndexName, equalTo(expectedBackingIndexName));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesArray;
|
||||||
import org.elasticsearch.common.bytes.BytesReference;
|
import org.elasticsearch.common.bytes.BytesReference;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.elasticsearch.xcontent.DeprecationHandler;
|
import org.elasticsearch.xcontent.DeprecationHandler;
|
||||||
import org.elasticsearch.xcontent.NamedXContentRegistry;
|
import org.elasticsearch.xcontent.NamedXContentRegistry;
|
||||||
|
@ -31,7 +32,7 @@ public class IndexTemplateMetadataTests extends ESTestCase {
|
||||||
|
|
||||||
public void testIndexTemplateMetadataXContentRoundTrip() throws Exception {
|
public void testIndexTemplateMetadataXContentRoundTrip() throws Exception {
|
||||||
|
|
||||||
String template = formatted("""
|
String template = Strings.format("""
|
||||||
{
|
{
|
||||||
"index_patterns": [ ".test-*" ],
|
"index_patterns": [ ".test-*" ],
|
||||||
"order": 1000,
|
"order": 1000,
|
||||||
|
@ -112,7 +113,7 @@ public class IndexTemplateMetadataTests extends ESTestCase {
|
||||||
});
|
});
|
||||||
assertThat(nullPatternError.getMessage(), equalTo("Index patterns must not be null or empty; got null"));
|
assertThat(nullPatternError.getMessage(), equalTo("Index patterns must not be null or empty; got null"));
|
||||||
|
|
||||||
final String templateWithEmptyPattern = formatted("""
|
final String templateWithEmptyPattern = Strings.format("""
|
||||||
{
|
{
|
||||||
"index_patterns": [],
|
"index_patterns": [],
|
||||||
"order": 1000,
|
"order": 1000,
|
||||||
|
@ -145,7 +146,7 @@ public class IndexTemplateMetadataTests extends ESTestCase {
|
||||||
assertThat(ex.getMessage(), equalTo("Index patterns must not be null or empty; got []"));
|
assertThat(ex.getMessage(), equalTo("Index patterns must not be null or empty; got []"));
|
||||||
}
|
}
|
||||||
|
|
||||||
final String templateWithoutPattern = formatted("""
|
final String templateWithoutPattern = Strings.format("""
|
||||||
{
|
{
|
||||||
"order": 1000,
|
"order": 1000,
|
||||||
"settings": {
|
"settings": {
|
||||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference;
|
||||||
import org.elasticsearch.common.util.Maps;
|
import org.elasticsearch.common.util.Maps;
|
||||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||||
import org.elasticsearch.common.util.set.Sets;
|
import org.elasticsearch.common.util.set.Sets;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.core.Tuple;
|
import org.elasticsearch.core.Tuple;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.elasticsearch.threadpool.TestThreadPool;
|
import org.elasticsearch.threadpool.TestThreadPool;
|
||||||
|
@ -189,7 +190,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
Map<String, BytesReference> additions = Maps.newMapWithExpectedSize(additionsCount);
|
Map<String, BytesReference> additions = Maps.newMapWithExpectedSize(additionsCount);
|
||||||
for (int i = 0; i < additionsCount; i++) {
|
for (int i = 0; i < additionsCount; i++) {
|
||||||
additions.put("add_template_" + i, new BytesArray(formatted("""
|
additions.put("add_template_" + i, new BytesArray(Strings.format("""
|
||||||
{"index_patterns" : "*", "order" : %s}
|
{"index_patterns" : "*", "order" : %s}
|
||||||
""", i)));
|
""", i)));
|
||||||
}
|
}
|
||||||
|
|
|
@ -230,7 +230,7 @@ public class ToAndFromJsonMetadataTests extends ESTestCase {
|
||||||
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"meta-data" : {
|
"meta-data" : {
|
||||||
"version" : 0,
|
"version" : 0,
|
||||||
|
@ -318,7 +318,7 @@ public class ToAndFromJsonMetadataTests extends ESTestCase {
|
||||||
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"metadata" : {
|
"metadata" : {
|
||||||
"cluster_uuid" : "clusterUUID",
|
"cluster_uuid" : "clusterUUID",
|
||||||
|
@ -389,7 +389,7 @@ public class ToAndFromJsonMetadataTests extends ESTestCase {
|
||||||
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"meta-data" : {
|
"meta-data" : {
|
||||||
"version" : 0,
|
"version" : 0,
|
||||||
|
@ -455,7 +455,7 @@ public class ToAndFromJsonMetadataTests extends ESTestCase {
|
||||||
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"metadata" : {
|
"metadata" : {
|
||||||
"cluster_uuid" : "clusterUUID",
|
"cluster_uuid" : "clusterUUID",
|
||||||
|
@ -558,7 +558,7 @@ public class ToAndFromJsonMetadataTests extends ESTestCase {
|
||||||
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"metadata" : {
|
"metadata" : {
|
||||||
"cluster_uuid" : "clusterUUID",
|
"cluster_uuid" : "clusterUUID",
|
||||||
|
@ -693,7 +693,7 @@ public class ToAndFromJsonMetadataTests extends ESTestCase {
|
||||||
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
ChunkedToXContent.wrapAsToXContent(metadata).toXContent(builder, new ToXContent.MapParams(mapParams));
|
||||||
builder.endObject();
|
builder.endObject();
|
||||||
|
|
||||||
assertEquals(formatted("""
|
assertEquals(Strings.format("""
|
||||||
{
|
{
|
||||||
"metadata" : {
|
"metadata" : {
|
||||||
"cluster_uuid" : "clusterUUID",
|
"cluster_uuid" : "clusterUUID",
|
||||||
|
|
|
@ -13,13 +13,13 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||||
import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus;
|
import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus;
|
||||||
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
|
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
|
||||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
|
||||||
|
|
||||||
import static java.util.Collections.emptyList;
|
import static java.util.Collections.emptyList;
|
||||||
import static java.util.Collections.emptyMap;
|
import static java.util.Collections.emptyMap;
|
||||||
|
@ -58,10 +58,7 @@ public class AllocateUnassignedDecisionTests extends ESTestCase {
|
||||||
if (allocationStatus == AllocationStatus.FETCHING_SHARD_DATA) {
|
if (allocationStatus == AllocationStatus.FETCHING_SHARD_DATA) {
|
||||||
assertEquals(Explanations.Allocation.AWAITING_INFO, noDecision.getExplanation());
|
assertEquals(Explanations.Allocation.AWAITING_INFO, noDecision.getExplanation());
|
||||||
} else if (allocationStatus == AllocationStatus.DELAYED_ALLOCATION) {
|
} else if (allocationStatus == AllocationStatus.DELAYED_ALLOCATION) {
|
||||||
assertThat(
|
assertThat(noDecision.getExplanation(), equalTo(Strings.format(Explanations.Allocation.DELAYED_WITHOUT_ALTERNATIVE, "0s")));
|
||||||
noDecision.getExplanation(),
|
|
||||||
equalTo(String.format(Locale.ROOT, Explanations.Allocation.DELAYED_WITHOUT_ALTERNATIVE, "0s"))
|
|
||||||
);
|
|
||||||
} else {
|
} else {
|
||||||
assertThat(noDecision.getExplanation(), equalTo(Explanations.Allocation.NO_COPIES));
|
assertThat(noDecision.getExplanation(), equalTo(Explanations.Allocation.NO_COPIES));
|
||||||
}
|
}
|
||||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.cluster.routing.RoutingTable;
|
||||||
import org.elasticsearch.cluster.routing.ShardRouting;
|
import org.elasticsearch.cluster.routing.ShardRouting;
|
||||||
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
|
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.indices.cluster.ClusterStateChanges;
|
import org.elasticsearch.indices.cluster.ClusterStateChanges;
|
||||||
import org.elasticsearch.test.VersionUtils;
|
import org.elasticsearch.test.VersionUtils;
|
||||||
import org.elasticsearch.threadpool.TestThreadPool;
|
import org.elasticsearch.threadpool.TestThreadPool;
|
||||||
|
@ -220,7 +221,7 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase {
|
||||||
protected DiscoveryNode createNode(DiscoveryNodeRole... mustHaveRoles) {
|
protected DiscoveryNode createNode(DiscoveryNodeRole... mustHaveRoles) {
|
||||||
Set<DiscoveryNodeRole> roles = new HashSet<>(randomSubsetOf(DiscoveryNodeRole.roles()));
|
Set<DiscoveryNodeRole> roles = new HashSet<>(randomSubsetOf(DiscoveryNodeRole.roles()));
|
||||||
Collections.addAll(roles, mustHaveRoles);
|
Collections.addAll(roles, mustHaveRoles);
|
||||||
final String id = formatted("node_%03d", nodeIdGenerator.incrementAndGet());
|
final String id = Strings.format("node_%03d", nodeIdGenerator.incrementAndGet());
|
||||||
return new DiscoveryNode(
|
return new DiscoveryNode(
|
||||||
id,
|
id,
|
||||||
id,
|
id,
|
||||||
|
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision;
|
||||||
import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
|
import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
|
||||||
import org.elasticsearch.common.settings.ClusterSettings;
|
import org.elasticsearch.common.settings.ClusterSettings;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.core.Strings;
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.snapshots.SnapshotShardSizeInfo;
|
import org.elasticsearch.snapshots.SnapshotShardSizeInfo;
|
||||||
|
|
||||||
|
@ -232,7 +233,7 @@ public class SameShardRoutingTests extends ESAllocationTestCase {
|
||||||
assertThat(
|
assertThat(
|
||||||
decision.getExplanation(),
|
decision.getExplanation(),
|
||||||
equalTo(
|
equalTo(
|
||||||
formatted(
|
Strings.format(
|
||||||
"""
|
"""
|
||||||
cannot allocate to node [%s] because a copy of this shard is already allocated to node [%s] with the same host \
|
cannot allocate to node [%s] because a copy of this shard is already allocated to node [%s] with the same host \
|
||||||
address [%s] and [%s] is [true] which forbids more than one node on each host from holding a copy of this shard\
|
address [%s] and [%s] is [true] which forbids more than one node on each host from holding a copy of this shard\
|
||||||
|
|
|
@@ -21,6 +21,7 @@ import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.core.CheckedFunction;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.test.ESTestCase;

@@ -76,7 +77,7 @@ public abstract class AbstractStreamTests extends ESTestCase {
         final byte[] corruptBytes = new byte[] { randomFrom(set) };
         final BytesReference corrupt = new BytesArray(corruptBytes);
         final IllegalStateException e = expectThrows(IllegalStateException.class, () -> getStreamInput(corrupt).readBoolean());
-        final String message = formatted("unexpected byte [0x%02x]", corruptBytes[0]);
+        final String message = Strings.format("unexpected byte [0x%02x]", corruptBytes[0]);
         assertThat(e, hasToString(containsString(message)));
     }

@@ -111,7 +112,7 @@ public abstract class AbstractStreamTests extends ESTestCase {
         final byte[] corruptBytes = new byte[] { randomFrom(set) };
         final BytesReference corrupt = new BytesArray(corruptBytes);
         final IllegalStateException e = expectThrows(IllegalStateException.class, () -> getStreamInput(corrupt).readOptionalBoolean());
-        final String message = formatted("unexpected byte [0x%02x]", corruptBytes[0]);
+        final String message = Strings.format("unexpected byte [0x%02x]", corruptBytes[0]);
         assertThat(e, hasToString(containsString(message)));
     }

@@ -9,6 +9,7 @@
 package org.elasticsearch.common.time;

 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.ESTestCase;

 import java.time.Instant;
@@ -403,7 +404,7 @@ public class JavaDateMathParserTests extends ESTestCase {
         long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli();
         if (gotMillis != expectedMillis) {
             ZonedDateTime zonedDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(gotMillis), ZoneOffset.UTC);
-            fail(formatted("""
+            fail(Strings.format("""
                 Date math not equal
                 Original : %s
                 Parsed : %s
@@ -14,6 +14,7 @@ import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.test.ESTestCase;

@@ -198,7 +199,7 @@ public class PriorityComparatorTests extends ESTestCase {
             }
             // else sometimes just use the defaults

-            indices[i] = IndexMetadata.builder(formatted("idx_%04d", i))
+            indices[i] = IndexMetadata.builder(Strings.format("idx_%04d", i))
                 .system(isSystem)
                 .settings(buildSettings(creationDate, priority))
                 .build();
@@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.time.DateUtils;
 import org.elasticsearch.core.CheckedConsumer;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
 import org.elasticsearch.script.DateFieldScript;
@@ -531,7 +532,7 @@ public class DateFieldMapperTests extends MapperTestCase {
     private String randomIs8601Nanos(long maxMillis) {
         String date = DateFieldMapper.DEFAULT_DATE_TIME_NANOS_FORMATTER.formatMillis(randomLongBetween(0, maxMillis));
         date = date.substring(0, date.length() - 1); // Strip off trailing "Z"
-        return date + formatted("%06d", between(0, 999999)) + "Z"; // Add nanos and the "Z"
+        return date + Strings.format("%06d", between(0, 999999)) + "Z"; // Add nanos and the "Z"
     }

     private String randomDecimalNanos(long maxMillis) {
@@ -8,6 +8,7 @@

 package org.elasticsearch.index.query;

+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.AbstractQueryTestCase;
 import org.elasticsearch.xcontent.json.JsonStringEncoder;

@@ -39,7 +40,7 @@ public abstract class AbstractTermQueryTestCase<QB extends BaseTermQueryBuilder<
         } else {
             value = testQuery.value();
         }
-        String contentString = formatted("""
+        String contentString = Strings.format("""
             {
                 "%s" : {
                     "%s" : %s
@@ -14,6 +14,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.AbstractQueryTestCase;

 import java.io.IOException;
@@ -103,7 +104,7 @@ public class CombinedFieldsQueryBuilderTests extends AbstractQueryTestCase<Combi
         Object[] expectedValues = new Object[] { "2", "2", "2%", null };
         int i = 0;
         for (Object value : testValues) {
-            String json = formatted("""
+            String json = Strings.format("""
                 {
                     "combined_fields" : {
                         "query" : "quick brown fox",
@@ -12,6 +12,7 @@ import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.AbstractQueryTestCase;

 import java.io.IOException;
@@ -62,7 +63,7 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
      * test that "filter" does not accept an array of queries, throws {@link ParsingException}
      */
     public void testNoArrayAsFilterElements() throws IOException {
-        String queryString = formatted("""
+        String queryString = Strings.format("""
             {
                 "%s": {
                     "filter": [ { "term": { "foo": "a" } }, { "term": { "foo": "x" } } ]
@@ -13,6 +13,7 @@ import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.AbstractQueryTestCase;

 import java.io.IOException;
@@ -62,7 +63,7 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBu
         QueryBuilder innerQuery = createTestQueryBuilder().innerQueries().get(0);
         DisMaxQueryBuilder expectedQuery = new DisMaxQueryBuilder();
         expectedQuery.add(innerQuery);
-        String contentString = formatted("""
+        String contentString = Strings.format("""
             {
                 "dis_max": {
                     "queries": %s
@@ -78,7 +79,7 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBu
     }

     public void testToQueryInnerPrefixQuery() throws Exception {
-        String queryAsString = formatted("""
+        String queryAsString = Strings.format("""
             {
                 "dis_max": {
                     "queries": [
@@ -15,6 +15,7 @@ import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.unit.DistanceUnit;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
@@ -88,7 +89,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist
         // origin as string
         String origin = "2018-01-01T13:10:30Z";
         String pivot = "7d";
-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "distance_feature": {
                     "field": "%s",
@@ -105,7 +106,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist

         // origin as long
         long originLong = 1514812230999L;
-        json = formatted("""
+        json = Strings.format("""
             {
                 "distance_feature": {
                     "field": "%s",
@@ -122,7 +123,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist
         // origin as string
         String origin = "2018-01-01T13:10:30.323456789Z";
         String pivot = "100000000nanos";
-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "distance_feature": {
                     "field": "%s",
@@ -139,7 +140,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist

         // origin as long
         long originLong = 1514812230999L;
-        json = formatted("""
+        json = Strings.format("""
             {
                 "distance_feature": {
                     "field": "%s",
@@ -157,7 +158,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist
         final String pivot = "1km";

         // origin as string
-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "distance_feature": {
                     "field": "%s",
@@ -173,7 +174,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist
         assertEquals(json, 2.0, parsed.boost(), 0.0001);

         // origin as array
-        json = formatted("""
+        json = Strings.format("""
             {
                 "distance_feature": {
                     "field": "%s",
@@ -186,7 +187,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist
         assertEquals(json, origin, parsed.origin().origin());

         // origin as object
-        json = formatted("""
+        json = Strings.format("""
             {
                 "distance_feature": {
                     "field": "%s",
@@ -217,7 +218,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist
     }

     public void testQueryFailsWithWrongFieldType() {
-        String query = formatted("""
+        String query = Strings.format("""
             {
                 "distance_feature": {
                     "field": "%s",
@@ -14,6 +14,7 @@ import org.apache.lucene.queries.spans.SpanTermQuery;
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.AbstractQueryTestCase;

 import java.io.IOException;
@@ -63,7 +64,7 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase<Fie
     }

     public void testFromJson() throws IOException {
-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "%s" : {
                     "query" : {
@@ -87,7 +88,7 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase<Fie
     }

     public void testJsonWithTopLevelBoost() throws IOException {
-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "%s" : {
                     "query" : {
@@ -15,6 +15,7 @@ import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.unit.Fuzziness;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.test.AbstractQueryTestCase;

 import java.io.IOException;
@@ -52,7 +53,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
     protected Map<String, FuzzyQueryBuilder> getAlternateVersions() {
         Map<String, FuzzyQueryBuilder> alternateVersions = new HashMap<>();
         FuzzyQueryBuilder fuzzyQuery = new FuzzyQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10));
-        String contentString = formatted("""
+        String contentString = Strings.format("""
             {
                 "fuzzy" : {
                     "%s" : "%s"
@@ -84,7 +85,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
     }

     public void testToQueryWithStringField() throws IOException {
-        String query = formatted("""
+        String query = Strings.format("""
             {
                 "fuzzy":{
                     "%s":{
@@ -107,7 +108,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
     }

     public void testToQueryWithStringFieldDefinedFuzziness() throws IOException {
-        String query = formatted("""
+        String query = Strings.format("""
             {
                 "fuzzy":{
                     "%s":{
@@ -130,7 +131,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
     }

     public void testToQueryWithStringFieldDefinedWrongFuzziness() throws IOException {
-        String queryMissingFuzzinessUpLimit = formatted("""
+        String queryMissingFuzzinessUpLimit = Strings.format("""
             {
                 "fuzzy":{
                     "%s":{
@@ -148,7 +149,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
         String msg = "failed to find low and high distance values";
         assertTrue(e.getMessage() + " didn't contain: " + msg + " but: " + e.getMessage(), e.getMessage().contains(msg));

-        String queryHavingNegativeFuzzinessLowLimit = formatted("""
+        String queryHavingNegativeFuzzinessLowLimit = Strings.format("""
             {
                 "fuzzy":{
                     "%s":{
@@ -166,7 +167,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
         );
         assertTrue(e2.getMessage() + " didn't contain: " + msg2 + " but: " + e.getMessage(), e.getMessage().contains(msg));

-        String queryMissingFuzzinessUpLimit2 = formatted("""
+        String queryMissingFuzzinessUpLimit2 = Strings.format("""
             {
                 "fuzzy":{
                     "%s":{
@@ -183,7 +184,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
         );
         assertTrue(e.getMessage() + " didn't contain: " + msg + " but: " + e.getMessage(), e.getMessage().contains(msg));

-        String queryMissingFuzzinessLowLimit = formatted("""
+        String queryMissingFuzzinessLowLimit = Strings.format("""
             {
                 "fuzzy":{
                     "%s":{
@@ -203,7 +204,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
     }

     public void testToQueryWithNumericField() throws IOException {
-        String query = formatted("""
+        String query = Strings.format("""
             {
                 "fuzzy":{
                     "%s":{
@@ -15,6 +15,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.core.Strings;
 import org.elasticsearch.geo.GeometryTestUtils;
 import org.elasticsearch.geometry.LinearRing;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -161,7 +162,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
     }

     public void testParsingAndToQuery1() throws IOException {
-        String query = formatted("""
+        String query = Strings.format("""
             {
                 "geo_polygon": {
                     "%s": {
@@ -178,7 +179,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
     }

     public void testParsingAndToQuery2() throws IOException {
-        String query = formatted("""
+        String query = Strings.format("""
             {
                 "geo_polygon": {
                     "%s": {
@@ -191,7 +192,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
     }

     public void testParsingAndToQuery3() throws IOException {
-        String query = formatted("""
+        String query = Strings.format("""
             {
                 "geo_polygon": {
                     "%s": {
@@ -204,7 +205,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
     }

     public void testParsingAndToQuery4() throws IOException {
-        String query = formatted("""
+        String query = Strings.format("""
             {
                 "geo_polygon": {
                     "%s": {
@@ -256,7 +257,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo

     public void testPointValidation() throws IOException {
         SearchExecutionContext context = createSearchExecutionContext();
-        String queryInvalidLat = formatted("""
+        String queryInvalidLat = Strings.format("""
             {
                 "geo_polygon": {
                     "%s": {
@@ -272,7 +273,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
         QueryShardException e1 = expectThrows(QueryShardException.class, () -> parseQuery(queryInvalidLat).toQuery(context));
         assertThat(e1.getMessage(), containsString("illegal latitude value [140.0] for [geo_polygon]"));

-        String queryInvalidLon = formatted("""
+        String queryInvalidLon = Strings.format("""
             {
                 "geo_polygon": {
                     "%s": {
@@ -180,7 +180,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue

     public void testMatchInterval() throws IOException {

-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -196,7 +196,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue

         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        json = formatted("""
+        json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -215,7 +215,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         );
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        json = formatted("""
+        json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -235,7 +235,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         );
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        json = formatted("""
+        json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -256,7 +256,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         );
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        json = formatted("""
+        json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -278,7 +278,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         );
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        json = formatted("""
+        json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -312,7 +312,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue

     public void testOrInterval() throws IOException {

-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -337,7 +337,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.or(Intervals.term("one"), Intervals.term("two")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        json = formatted("""
+        json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -375,7 +375,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue

     public void testCombineInterval() throws IOException {

-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -427,7 +427,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
     }

     public void testCombineDisjunctionInterval() throws IOException {
-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -503,7 +503,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         });
         assertThat(e.getMessage(), equalTo("Cannot create intervals over field [" + NO_POSITIONS_FIELD + "] with no positions indexed"));

-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -526,7 +526,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
     }

     public void testMultipleProviders() {
-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -566,7 +566,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
             }
         };

-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -594,7 +594,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue

     public void testPrefixes() throws IOException {

-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -608,7 +608,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.prefix(new BytesRef("term")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String no_positions_json = formatted("""
+        String no_positions_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -623,7 +623,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
             builder1.toQuery(createSearchExecutionContext());
         });

-        String no_positions_fixed_field_json = formatted("""
+        String no_positions_fixed_field_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -639,7 +639,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
             builder1.toQuery(createSearchExecutionContext());
         });

-        String prefix_json = formatted("""
+        String prefix_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -653,7 +653,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         expected = new IntervalQuery(PREFIXED_FIELD, Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.term("term")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String short_prefix_json = formatted("""
+        String short_prefix_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -670,7 +670,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         );
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String fix_field_prefix_json = formatted("""
+        String fix_field_prefix_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -689,7 +689,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         );
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String keyword_json = formatted("""
+        String keyword_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -704,7 +704,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         expected = new IntervalQuery(PREFIXED_FIELD, Intervals.fixField(PREFIXED_FIELD + "._index_prefix", Intervals.term("Term")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String keyword_fix_field_json = formatted("""
+        String keyword_fix_field_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -727,7 +727,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue

     public void testWildcard() throws IOException {

-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -742,7 +742,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         Query expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("te?m")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String no_positions_json = formatted("""
+        String no_positions_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -758,7 +758,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
             builder1.toQuery(createSearchExecutionContext());
         });

-        String keyword_json = formatted("""
+        String keyword_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -774,7 +774,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.wildcard(new BytesRef("Te?m")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String fixed_field_json = formatted("""
+        String fixed_field_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -790,7 +790,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         expected = new IntervalQuery(TEXT_FIELD_NAME, Intervals.fixField(MASKED_FIELD, Intervals.wildcard(new BytesRef("te?m"))));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String fixed_field_json_no_positions = formatted("""
+        String fixed_field_json_no_positions = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -806,7 +806,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
             builder1.toQuery(createSearchExecutionContext());
         });

-        String fixed_field_analyzer_json = formatted("""
+        String fixed_field_analyzer_json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -831,7 +831,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue

     public void testFuzzy() throws IOException {

-        String json = formatted("""
+        String json = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -849,7 +849,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         );
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String json_with_prefix = formatted("""
+        String json_with_prefix = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -864,7 +864,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         expected = new IntervalQuery(TEXT_FIELD_NAME, buildFuzzySource("term", "term", 2, true, Fuzziness.AUTO.asDistance("term")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String json_with_fuzziness = formatted("""
+        String json_with_fuzziness = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -880,7 +880,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         expected = new IntervalQuery(TEXT_FIELD_NAME, buildFuzzySource("term", "term", 2, true, Fuzziness.ONE.asDistance("term")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String json_no_transpositions = formatted("""
+        String json_no_transpositions = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -896,7 +896,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         expected = new IntervalQuery(TEXT_FIELD_NAME, buildFuzzySource("term", "term", 2, false, Fuzziness.AUTO.asDistance("term")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String json_with_analyzer = formatted("""
+        String json_with_analyzer = Strings.format("""
             {
                 "intervals": {
                     "%s": {
@@ -912,7 +912,7 @@ public class IntervalQueryBuilderTests extends AbstractQueryTestCase<IntervalQue
         expected = new IntervalQuery(TEXT_FIELD_NAME, buildFuzzySource("Term", "Term", 2, true, Fuzziness.AUTO.asDistance("term")));
         assertEquals(expected, builder.toQuery(createSearchExecutionContext()));

-        String json_with_fixfield = formatted("""
+        String json_with_fixfield = Strings.format("""
             {
                 "intervals": {
                     "%s": {
Some files were not shown because too many files have changed in this diff.
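For context on the pattern repeated throughout the hunks above, where formatted(...) and direct String.format(Locale.ROOT, ...) calls give way to Strings.format(...), here is a minimal standalone sketch (plain JDK code, not part of this commit; the class name is made up for illustration) of why pinning the locale matters for format strings used in test assertions:

import java.util.Locale;

// Made-up demo class: shows how the chosen locale changes String.format output.
public class LocaleFormatSketch {
    public static void main(String[] args) {
        double value = 3.14159;
        // Locale-sensitive: a German locale renders a decimal comma.
        System.out.println(String.format(Locale.GERMANY, "%.2f", value)); // prints 3,14
        // Locale.ROOT produces the same output on every machine.
        System.out.println(String.format(Locale.ROOT, "%.2f", value));    // prints 3.14
    }
}

A Strings.format(fmt, args) helper in this style would presumably just delegate to the Locale.ROOT overload, so assertions built from templates like "node_%03d" or "unexpected byte [0x%02x]" stay byte-for-byte stable regardless of the JVM's default locale.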