Mirror of https://github.com/elastic/elasticsearch.git (synced 2025-06-29 09:54:06 -04:00)
When storing nanoseconds on a date field, the nanosecond part is lost because a millisecond-resolution date field cannot store it; the date_nanos field type should be used instead. This commit emits a deprecation warning to notify users when that happens.

Closes #37962, backports #78921
parent b7968e6737
commit ff6e5899b2
13 changed files with 101 additions and 34 deletions
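
The check that drives the new warning (added to DateFieldType in the DateFieldMapper.java hunks below) is simple: parse the value to an Instant and test whether its nano-of-second part is a whole number of milliseconds; if not, a millisecond-resolution date field silently drops the remainder. A minimal standalone sketch of that detection logic, using plain java.time's Instant.parse in place of the mapper's configured DateFormatter (the class and method names here are illustrative only):

import java.time.Instant;

public class NanosLossCheck {

    // True when the value carries sub-millisecond precision that a
    // millisecond-resolution date field would silently drop; this mirrors
    // the "instant.getNano() % 1000000 != 0" test in parseWithDeprecation.
    static boolean losesPrecision(String value) {
        Instant instant = Instant.parse(value);
        return instant.getNano() % 1_000_000 != 0;
    }

    public static void main(String[] args) {
        System.out.println(losesPrecision("2015-01-01T12:10:30.123Z"));       // false: fits in millis
        System.out.println(losesPrecision("2015-01-01T12:10:30.123456789Z")); // true: would warn
    }
}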
@@ -219,7 +219,7 @@ public class IndexingIT extends AbstractRollingTestCase {
         Request index = new Request("POST", "/" + indexName + "/_doc/");
         XContentBuilder doc = XContentBuilder.builder(XContentType.JSON.xContent())
             .startObject()
-            .field("date", "2015-01-01T12:10:30.123456789Z")
+            .field("date", "2015-01-01T12:10:30.123Z")
             .field("date_nanos", "2015-01-01T12:10:30.123456789Z")
             .endObject();
         index.addParameter("refresh", "true");
@@ -9,7 +9,7 @@
          index: timetest
          body:
            mappings:
-             "properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}
+             "properties": { "my_time": {"type": "date_nanos", "format": "strict_date_optional_time_nanos"}}

  - do:
      ingest.put_pipeline:
@@ -8,7 +8,7 @@ setup:
          mappings:
            properties:
              mydate:
-               type: date
+               type: date_nanos
                format: "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ"

  - do:
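
Both YAML fixtures above move their mappings from date to date_nanos, which is exactly the migration the new deprecation message recommends. For illustration, the same change driven through the low-level REST client could look like the following sketch (the nanos-test index and my_time field are invented for the example; only the date_nanos field type itself comes from the commit):

import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public class DateNanosMappingExample {

    // Create an index whose timestamp field keeps nanosecond precision,
    // then index a nanosecond-resolution value without losing the nanos.
    static void indexWithNanos(RestClient client) throws IOException {
        Request createIndex = new Request("PUT", "/nanos-test");
        createIndex.setJsonEntity(
            "{ \"mappings\": { \"properties\": { \"my_time\": { \"type\": \"date_nanos\" } } } }");
        client.performRequest(createIndex);

        Request indexDoc = new Request("POST", "/nanos-test/_doc");
        indexDoc.addParameter("refresh", "true");
        indexDoc.setJsonEntity("{ \"my_time\": \"2015-01-01T12:10:30.123456789Z\" }");
        client.performRequest(indexDoc);
    }
}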
@ -75,6 +75,7 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");
|
||||
public static final DateFormatter DEFAULT_DATE_TIME_NANOS_FORMATTER =
|
||||
DateFormatter.forPattern("strict_date_optional_time_nanos||epoch_millis");
|
||||
private final String indexName;
|
||||
|
||||
public enum Resolution {
|
||||
MILLISECONDS(CONTENT_TYPE, NumericType.DATE) {
|
||||
|
@ -234,6 +235,7 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
private final Parameter<String> nullValue
|
||||
= Parameter.stringParam("null_value", false, m -> toType(m).nullValueAsString, null).acceptsNull();
|
||||
private final Parameter<Boolean> ignoreMalformed;
|
||||
private String indexName;
|
||||
|
||||
private final Parameter<Script> script = Parameter.scriptParam(m -> toType(m).script);
|
||||
private final Parameter<String> onScriptError = Parameter.onScriptErrorParam(m -> toType(m).onScriptError, script);
|
||||
|
@ -244,13 +246,14 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
|
||||
public Builder(String name, Resolution resolution, DateFormatter dateFormatter,
|
||||
ScriptCompiler scriptCompiler,
|
||||
boolean ignoreMalformedByDefault, Version indexCreatedVersion) {
|
||||
boolean ignoreMalformedByDefault, Version indexCreatedVersion, String indexName) {
|
||||
super(name);
|
||||
this.resolution = resolution;
|
||||
this.indexCreatedVersion = indexCreatedVersion;
|
||||
this.scriptCompiler = Objects.requireNonNull(scriptCompiler);
|
||||
this.ignoreMalformed
|
||||
= Parameter.boolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault);
|
||||
this.indexName = indexName;
|
||||
|
||||
DateFormatter defaultFormat = resolution == Resolution.NANOSECONDS && indexCreatedVersion.onOrAfter(Version.V_7_0_0) ?
|
||||
DEFAULT_DATE_TIME_NANOS_FORMATTER : DEFAULT_DATE_TIME_FORMATTER;
|
||||
|
@ -292,12 +295,13 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
return Arrays.asList(index, docValues, store, format, locale, nullValue, ignoreMalformed, script, onScriptError, boost, meta);
|
||||
}
|
||||
|
||||
private Long parseNullValue(DateFieldType fieldType) {
|
||||
private Long parseNullValue(DateFieldType fieldType, String indexName) {
|
||||
if (nullValue.getValue() == null) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
return fieldType.parse(nullValue.getValue());
|
||||
final String fieldName = fieldType.name();
|
||||
return fieldType.parseNullValueWithDeprecation(nullValue.getValue(), fieldName, indexName);
|
||||
} catch (Exception e) {
|
||||
DEPRECATION_LOGGER.critical(DeprecationCategory.MAPPINGS, "date_mapper_null_field",
|
||||
"Error parsing [" + nullValue.getValue() + "] as date in [null_value] on field [" + name() + "]);"
|
||||
|
@ -311,7 +315,7 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
DateFieldType ft = new DateFieldType(context.buildFullName(name()), index.getValue(), store.getValue(), docValues.getValue(),
|
||||
buildFormatter(), resolution, nullValue.getValue(), scriptValues(), meta.getValue());
|
||||
ft.setBoost(boost.getValue());
|
||||
Long nullTimestamp = parseNullValue(ft);
|
||||
Long nullTimestamp = parseNullValue(ft, indexName);
|
||||
return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context),
|
||||
copyTo.build(), nullTimestamp, resolution, this);
|
||||
}
|
||||
|
@ -320,13 +324,15 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
public static final TypeParser MILLIS_PARSER = new TypeParser((n, c) -> {
|
||||
boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
|
||||
return new Builder(n, Resolution.MILLISECONDS, c.getDateFormatter(), c.scriptCompiler(),
|
||||
ignoreMalformedByDefault, c.indexVersionCreated());
|
||||
ignoreMalformedByDefault, c.indexVersionCreated(),
|
||||
c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null);
|
||||
});
|
||||
|
||||
public static final TypeParser NANOS_PARSER = new TypeParser((n, c) -> {
|
||||
boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
|
||||
return new Builder(n, Resolution.NANOSECONDS, c.getDateFormatter(), c.scriptCompiler(),
|
||||
ignoreMalformedByDefault, c.indexVersionCreated());
|
||||
ignoreMalformedByDefault, c.indexVersionCreated(),
|
||||
c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null);
|
||||
});
|
||||
|
||||
public static final class DateFieldType extends MappedFieldType {
|
||||
|
@ -382,7 +388,31 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
|
||||
// Visible for testing.
|
||||
public long parse(String value) {
|
||||
return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant());
|
||||
final Instant instant = getInstant(value);
|
||||
return resolution.convert(instant);
|
||||
}
|
||||
|
||||
public long parseWithDeprecation(String value, String fieldName, String indexName) {
|
||||
final Instant instant = getInstant(value);
|
||||
if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
|
||||
DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
|
||||
"You are attempting to store a nanosecond resolution on a field [{}] of type date on index [{}]. " +
|
||||
"The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
|
||||
}
|
||||
return resolution.convert(instant);
|
||||
}
|
||||
|
||||
public long parseNullValueWithDeprecation(String value, String fieldName, String indexName) {
|
||||
final Instant instant = getInstant(value);
|
||||
if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
|
||||
DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
|
||||
"You are attempting to set null_value with a nanosecond resolution on a field [{}] of type date on index [{}]. " +
|
||||
"The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
|
||||
}
|
||||
return resolution.convert(instant);
|
||||
}
|
||||
private Instant getInstant(String value) {
|
||||
return DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -666,11 +696,13 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
this.script = builder.script.get();
|
||||
this.scriptCompiler = builder.scriptCompiler;
|
||||
this.scriptValues = builder.scriptValues();
|
||||
this.indexName = builder.indexName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldMapper.Builder getMergeBuilder() {
|
||||
return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).init(this);
|
||||
return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion, indexName)
|
||||
.init(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -695,7 +727,9 @@ public final class DateFieldMapper extends FieldMapper {
|
|||
timestamp = nullValue;
|
||||
} else {
|
||||
try {
|
||||
timestamp = fieldType().parse(dateAsString);
|
||||
final String fieldName = fieldType().name();
|
||||
final String indexName = context.indexSettings().getIndex().getName();
|
||||
timestamp = fieldType().parseWithDeprecation(dateAsString, fieldName, indexName);
|
||||
} catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) {
|
||||
if (ignoreMalformed) {
|
||||
context.addIgnoredField(mappedFieldType.name());
|
||||
|
|
|
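
With the mapper wired up as above, any document whose date value carries sub-millisecond nanos triggers a warning under the "date_field_with_nanos" deprecation key. Deprecation warnings also reach clients as HTTP Warning response headers, so the new message can be observed with the low-level REST client along these lines (a sketch: the millis-test index is invented, and Response.getWarnings() is assumed to be available in the client version in use):

import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class DeprecationWarningProbe {

    // Index a nanosecond value into a millisecond-resolution "date" field
    // and print any deprecation warnings returned via Warning headers.
    static void probe(RestClient client) throws IOException {
        Request indexDoc = new Request("POST", "/millis-test/_doc");
        indexDoc.setJsonEntity("{ \"date\": \"2015-01-01T12:10:30.123456789Z\" }");
        Response response = client.performRequest(indexDoc);
        for (String warning : response.getWarnings()) {
            System.out.println(warning); // expect "... Use date_nanos field type."
        }
    }
}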
@@ -300,7 +300,8 @@ final class DynamicFieldsBuilder {
         Settings settings = context.indexSettings().getSettings();
         boolean ignoreMalformed = FieldMapper.IGNORE_MALFORMED_SETTING.get(settings);
         createDynamicField(new DateFieldMapper.Builder(name, DateFieldMapper.Resolution.MILLISECONDS,
-            dateTimeFormatter, ScriptCompiler.NONE, ignoreMalformed, context.indexSettings().getIndexVersionCreated()), context);
+            dateTimeFormatter, ScriptCompiler.NONE, ignoreMalformed, context.indexSettings().getIndexVersionCreated(),
+            context.indexSettings().getIndex().getName()), context);
     }

     void newDynamicBinaryField(DocumentParserContext context, String name) throws IOException {
@@ -106,8 +106,7 @@ public final class MappingParser {
         }

         MappingParserContext parserContext = parserContextSupplier.get();
-        RootObjectMapper rootObjectMapper
-            = rootObjectTypeParser.parse(type, mapping, parserContext).build(MapperBuilderContext.ROOT);
+        RootObjectMapper rootObjectMapper = rootObjectTypeParser.parse(type, mapping, parserContext).build(MapperBuilderContext.ROOT);

         Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = metadataMappersFunction.apply(type);
         Map<String, Object> meta = null;
@@ -565,7 +565,7 @@ public class MetadataRolloverServiceTests extends ESTestCase {
             null,
             ScriptCompiler.NONE,
             false,
-            Version.CURRENT).build(MapperBuilderContext.ROOT);
+            Version.CURRENT, "indexName").build(MapperBuilderContext.ROOT);
         ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
         Environment env = mock(Environment.class);
         when(env.sharedDataFile()).thenReturn(null);
@@ -573,7 +573,7 @@ public class MetadataRolloverServiceTests extends ESTestCase {
         when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);
         RootObjectMapper.Builder root = new RootObjectMapper.Builder("_doc");
         root.add(new DateFieldMapper.Builder(dataStream.getTimeStampField().getName(), DateFieldMapper.Resolution.MILLISECONDS,
-            DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ScriptCompiler.NONE, true, Version.CURRENT));
+            DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ScriptCompiler.NONE, true, Version.CURRENT, "indexName"));
         MetadataFieldMapper dtfm = getDataStreamTimestampFieldMapper();
         Mapping mapping = new Mapping(
             root.build(MapperBuilderContext.ROOT),
@@ -148,6 +148,17 @@ public class DateFieldMapperTests extends MapperTestCase {
         testIgnoreMalformedForValue("-522000000", "long overflow", "date_optional_time");
     }

+    public void testResolutionLossDeprecation() throws Exception {
+        DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b
+            .field("type", "date")));
+
+        ParsedDocument doc = mapper.parse(source(b -> b.field("field", "2018-10-03T14:42:44.123456+0000")));
+
+        assertWarnings("You are attempting to store a nanosecond resolution " +
+            "on a field [field] of type date on index [index]. " +
+            "The nanosecond part was lost. Use date_nanos field type.");
+    }
+
     private void testIgnoreMalformedForValue(String value, String expectedCause, String dateFormat) throws IOException {

         DocumentMapper mapper = createDocumentMapper(fieldMapping((builder) -> dateFieldMapping(builder, dateFormat)));
@@ -403,11 +414,11 @@ public class DateFieldMapperTests extends MapperTestCase {
     }

     public void testFetchMillisFromIso8601Nanos() throws IOException {
-        assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), null);
+        assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), null);
     }

     public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
-        assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), "strict_date_optional_time_nanos");
+        assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), "strict_date_optional_time_nanos");
     }

     /**
@@ -418,7 +429,8 @@ public class DateFieldMapperTests extends MapperTestCase {
      * way.
      */
     public void testFetchMillisFromRoundedNanos() throws IOException {
-        assertFetch(dateMapperService(), "field", randomDecimalNanos(MAX_ISO_DATE), null);
+        assertFetch(dateMapperService(), "field", randomDecimalMillis(MAX_ISO_DATE), null);
+        assertFetch(dateNanosMapperService(), "field", randomDecimalNanos(MAX_NANOS), null);
     }

     /**
@@ -531,7 +543,7 @@ public class DateFieldMapperTests extends MapperTestCase {
         switch (((DateFieldType) ft).resolution()) {
             case MILLISECONDS:
                 if (randomBoolean()) {
-                    return randomIs8601Nanos(MAX_ISO_DATE);
+                    return randomDecimalMillis(MAX_ISO_DATE);
                 }
                 return randomLongBetween(0, Long.MAX_VALUE);
             case NANOSECONDS:
@@ -564,6 +576,10 @@ public class DateFieldMapperTests extends MapperTestCase {
         return Long.toString(randomLongBetween(0, maxMillis)) + "." + between(0, 999999);
     }

+    private String randomDecimalMillis(long maxMillis) {
+        return Long.toString(randomLongBetween(0, maxMillis));
+    }
+
     public void testScriptAndPrecludedParameters() {
         {
             Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
@@ -14,6 +14,7 @@ import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.test.rest.ESRestTestCase;
@@ -101,7 +102,9 @@ public class AutoCreateDataStreamIT extends ESRestTestCase {

     private Response indexDocument() throws IOException {
         final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc");
-        indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + Instant.now() + "\", \"name\": \"Kimchi\" }");
+        final Instant now = Instant.now();
+        final String time = DateFormatter.forPattern("strict_date_optional_time").format(now);
+        indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + time + "\", \"name\": \"Kimchi\" }");
         return client().performRequest(indexDocumentRequest);
     }
 }
@@ -12,6 +12,8 @@ import org.elasticsearch.client.Response;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.test.rest.ESRestTestCase;

 import java.time.ZoneOffset;
@@ -280,7 +282,7 @@ public class PainlessDomainSplitIT extends ESRestTestCase {

         for (int i = 1; i <= 100; i++) {
             ZonedDateTime time = baseTime.plusHours(i);
-            String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME);
+            String formattedTime = DateFormatter.forPattern("strict_date_optional_time").format(time);
             if (i % 50 == 0) {
                 // Anomaly has 100 docs, but we don't care about the value
                 for (int j = 0; j < 100; j++) {
@@ -210,8 +210,8 @@ teardown:
           test_alias: {}
         mappings:
           properties:
-            time:
-              type: date
+            date:
+              type: date_nanos
             user:
               type: keyword
             stars:
@@ -48,8 +48,8 @@ teardown:
           test_alias: {}
         mappings:
           properties:
-            time:
-              type: date
+            date:
+              type: date_nanos
             user:
               type: keyword
             stars:
@@ -107,8 +107,8 @@ teardown:
           test_alias: {}
         mappings:
           properties:
-            time:
-              type: date
+            date:
+              type: date_nanos
             user:
               type: keyword
             stars:
@@ -34,6 +34,7 @@ import org.elasticsearch.client.transform.transforms.TransformStats;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -52,6 +53,7 @@ import java.text.DecimalFormat;
 import java.text.DecimalFormatSymbols;
 import java.time.Instant;
 import java.time.ZoneId;
+import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.Collections;
@@ -188,7 +190,7 @@ public class TransformContinuousIT extends ESRestTestCase {
         for (int i = 0; i < 100; i++) {
             dates.add(
                 // create a random date between 1/1/2001 and 1/1/2006
-                ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
+                formatTimestamp(dateType)
                     .format(Instant.ofEpochMilli(randomLongBetween(978307200000L, 1136073600000L)))
             );
         }
@@ -243,16 +245,18 @@ public class TransformContinuousIT extends ESRestTestCase {
         }

         // simulate a different timestamp that is off from the timestamp used for sync, so it can fall into the previous bucket
-        String metricDateString = ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
+        String metricDateString = formatTimestamp(dateType)
             .format(runDate.minusSeconds(randomIntBetween(0, 2)).plusNanos(randomIntBetween(0, 999999)));
         source.append("\"metric-timestamp\":\"").append(metricDateString).append("\",");

-        String dateString = ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
-            .format(runDate.plusNanos(randomIntBetween(0, 999999)));
+        final Instant timestamp = runDate.plusNanos(randomIntBetween(0, 999999));
+        String dateString = formatTimestamp(dateType)
+            .format(timestamp);

         source.append("\"timestamp\":\"").append(dateString).append("\",");
         // for data streams
-        source.append("\"@timestamp\":\"").append(dateString).append("\",");
+        // dynamic field results in a date type
+        source.append("\"@timestamp\":\"").append(formatTimestamp("date").format(timestamp)).append("\",");
         source.append("\"run\":").append(run);
         source.append("}");
@@ -295,6 +299,14 @@ public class TransformContinuousIT extends ESRestTestCase {
         }
     }

+    private DateFormatter formatTimestamp(String dateType) {
+        if ("date_nanos".equals(dateType)) {
+            return DateFormatter.forPattern("strict_date_optional_time_nanos").withZone(ZoneId.of("UTC"));
+        } else {
+            return DateFormatter.forPattern("strict_date_optional_time").withZone(ZoneId.of("UTC"));
+        }
+    }
+
     /**
      * Create the transform source index with randomized settings to increase test coverage, for example
      * index sorting, triggers query optimizations.