Deprecate resolution loss on date field (#78921) backport(#79355)

When a value with nanosecond precision is indexed into a date field, the sub-millisecond
part is silently lost, since millisecond resolution cannot represent it. The date_nanos
field type should be used instead. This commit emits a deprecation warning to notify
users when this happens.

closes #37962
backports #78921
Author: Przemyslaw Gomulka
Date: 2021-10-18 16:52:20 +02:00
Commit: ff6e5899b2 (parent b7968e6737)
13 changed files with 101 additions and 34 deletions
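
For context, the heart of the change (in the DateFieldMapper hunks below) is a check for a non-zero sub-millisecond component. A minimal, self-contained sketch of the lossy conversion and of the detection condition, using only java.time; the sample value is the one indexed by the rolling-upgrade test in this commit:

    import java.time.Instant;

    public class ResolutionLossDemo {
        public static void main(String[] args) {
            // Nanosecond-precision timestamp, as indexed in the rolling-upgrade test below.
            Instant instant = Instant.parse("2015-01-01T12:10:30.123456789Z");

            // A `date` field stores epoch milliseconds, so indexing truncates
            // everything below the millisecond.
            long millis = instant.toEpochMilli();
            System.out.println(Instant.ofEpochMilli(millis)); // 2015-01-01T12:10:30.123Z

            // The new deprecation check fires exactly when that truncation is lossy,
            // i.e. the nano-of-second is not a whole number of milliseconds.
            System.out.println(instant.getNano() % 1000000 != 0); // true
        }
    }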


@@ -219,7 +219,7 @@ public class IndexingIT extends AbstractRollingTestCase {
         Request index = new Request("POST", "/" + indexName + "/_doc/");
         XContentBuilder doc = XContentBuilder.builder(XContentType.JSON.xContent())
             .startObject()
-            .field("date", "2015-01-01T12:10:30.123456789Z")
+            .field("date", "2015-01-01T12:10:30.123Z")
             .field("date_nanos", "2015-01-01T12:10:30.123456789Z")
             .endObject();
         index.addParameter("refresh", "true");


@@ -9,7 +9,7 @@
       index: timetest
       body:
         mappings:
-          "properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}
+          "properties": { "my_time": {"type": "date_nanos", "format": "strict_date_optional_time_nanos"}}

 - do:
     ingest.put_pipeline:


@@ -8,7 +8,7 @@ setup:
         mappings:
           properties:
             mydate:
-              type: date
+              type: date_nanos
              format: "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ"

 - do:


@@ -75,6 +75,7 @@ public final class DateFieldMapper extends FieldMapper {
     public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");
     public static final DateFormatter DEFAULT_DATE_TIME_NANOS_FORMATTER =
         DateFormatter.forPattern("strict_date_optional_time_nanos||epoch_millis");
+    private final String indexName;

     public enum Resolution {
         MILLISECONDS(CONTENT_TYPE, NumericType.DATE) {
@@ -234,6 +235,7 @@ public final class DateFieldMapper extends FieldMapper {
         private final Parameter<String> nullValue
             = Parameter.stringParam("null_value", false, m -> toType(m).nullValueAsString, null).acceptsNull();
         private final Parameter<Boolean> ignoreMalformed;
+        private String indexName;

         private final Parameter<Script> script = Parameter.scriptParam(m -> toType(m).script);
         private final Parameter<String> onScriptError = Parameter.onScriptErrorParam(m -> toType(m).onScriptError, script);
@@ -244,13 +246,14 @@ public final class DateFieldMapper extends FieldMapper {
         public Builder(String name, Resolution resolution, DateFormatter dateFormatter,
                        ScriptCompiler scriptCompiler,
-                       boolean ignoreMalformedByDefault, Version indexCreatedVersion) {
+                       boolean ignoreMalformedByDefault, Version indexCreatedVersion, String indexName) {
             super(name);
             this.resolution = resolution;
             this.indexCreatedVersion = indexCreatedVersion;
             this.scriptCompiler = Objects.requireNonNull(scriptCompiler);
             this.ignoreMalformed
                 = Parameter.boolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault);
+            this.indexName = indexName;

             DateFormatter defaultFormat = resolution == Resolution.NANOSECONDS && indexCreatedVersion.onOrAfter(Version.V_7_0_0) ?
                 DEFAULT_DATE_TIME_NANOS_FORMATTER : DEFAULT_DATE_TIME_FORMATTER;
@@ -292,12 +295,13 @@ public final class DateFieldMapper extends FieldMapper {
             return Arrays.asList(index, docValues, store, format, locale, nullValue, ignoreMalformed, script, onScriptError, boost, meta);
         }

-        private Long parseNullValue(DateFieldType fieldType) {
+        private Long parseNullValue(DateFieldType fieldType, String indexName) {
             if (nullValue.getValue() == null) {
                 return null;
             }
             try {
-                return fieldType.parse(nullValue.getValue());
+                final String fieldName = fieldType.name();
+                return fieldType.parseNullValueWithDeprecation(nullValue.getValue(), fieldName, indexName);
             } catch (Exception e) {
                 DEPRECATION_LOGGER.critical(DeprecationCategory.MAPPINGS, "date_mapper_null_field",
                     "Error parsing [" + nullValue.getValue() + "] as date in [null_value] on field [" + name() + "]);"
@@ -311,7 +315,7 @@ public final class DateFieldMapper extends FieldMapper {
             DateFieldType ft = new DateFieldType(context.buildFullName(name()), index.getValue(), store.getValue(), docValues.getValue(),
                 buildFormatter(), resolution, nullValue.getValue(), scriptValues(), meta.getValue());
             ft.setBoost(boost.getValue());
-            Long nullTimestamp = parseNullValue(ft);
+            Long nullTimestamp = parseNullValue(ft, indexName);
             return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context),
                 copyTo.build(), nullTimestamp, resolution, this);
         }
@@ -320,13 +324,15 @@ public final class DateFieldMapper extends FieldMapper {
     public static final TypeParser MILLIS_PARSER = new TypeParser((n, c) -> {
         boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
         return new Builder(n, Resolution.MILLISECONDS, c.getDateFormatter(), c.scriptCompiler(),
-            ignoreMalformedByDefault, c.indexVersionCreated());
+            ignoreMalformedByDefault, c.indexVersionCreated(),
+            c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null);
     });

     public static final TypeParser NANOS_PARSER = new TypeParser((n, c) -> {
         boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
         return new Builder(n, Resolution.NANOSECONDS, c.getDateFormatter(), c.scriptCompiler(),
-            ignoreMalformedByDefault, c.indexVersionCreated());
+            ignoreMalformedByDefault, c.indexVersionCreated(),
+            c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null);
     });

     public static final class DateFieldType extends MappedFieldType {
@@ -382,7 +388,31 @@ public final class DateFieldMapper extends FieldMapper {
         // Visible for testing.
         public long parse(String value) {
-            return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant());
+            final Instant instant = getInstant(value);
+            return resolution.convert(instant);
+        }
+
+        public long parseWithDeprecation(String value, String fieldName, String indexName) {
+            final Instant instant = getInstant(value);
+            if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
+                DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
+                    "You are attempting to store a nanosecond resolution on a field [{}] of type date on index [{}]. " +
+                        "The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
+            }
+            return resolution.convert(instant);
+        }
+
+        public long parseNullValueWithDeprecation(String value, String fieldName, String indexName) {
+            final Instant instant = getInstant(value);
+            if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
+                DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
+                    "You are attempting to set null_value with a nanosecond resolution on a field [{}] of type date on index [{}]. " +
+                        "The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
+            }
+            return resolution.convert(instant);
+        }
+
+        private Instant getInstant(String value) {
+            return DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant();
         }

         /**
@@ -666,11 +696,13 @@ public final class DateFieldMapper extends FieldMapper {
         this.script = builder.script.get();
         this.scriptCompiler = builder.scriptCompiler;
         this.scriptValues = builder.scriptValues();
+        this.indexName = builder.indexName;
     }

     @Override
     public FieldMapper.Builder getMergeBuilder() {
-        return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).init(this);
+        return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion, indexName)
+            .init(this);
     }

     @Override
@@ -695,7 +727,9 @@ public final class DateFieldMapper extends FieldMapper {
             timestamp = nullValue;
         } else {
             try {
-                timestamp = fieldType().parse(dateAsString);
+                final String fieldName = fieldType().name();
+                final String indexName = context.indexSettings().getIndex().getName();
+                timestamp = fieldType().parseWithDeprecation(dateAsString, fieldName, indexName);
             } catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) {
                 if (ignoreMalformed) {
                     context.addIgnoredField(mappedFieldType.name());
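
On the wire, deprecation warnings such as the one introduced above reach REST clients through the `Warning` response header (and the deprecation log). A sketch, not part of this commit, using the low-level REST client that the tests in this commit already use; the index name and the surrounding test scaffolding are assumptions:

    // Index a nanosecond-precision value into a millisecond `date` field and read
    // back the warning. Assumes an ESRestTestCase-style client() and an index
    // whose mapping declares "date" as a plain date field.
    Request index = new Request("POST", "/my-index/_doc");
    index.setJsonEntity("{ \"date\": \"2015-01-01T12:10:30.123456789Z\" }");
    Response response = client().performRequest(index);
    for (String warning : response.getWarnings()) {
        // Expected to contain: "You are attempting to store a nanosecond resolution
        // on a field [date] of type date on index [my-index]. The nanosecond part
        // was lost. Use date_nanos field type."
        System.out.println(warning);
    }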


@@ -300,7 +300,8 @@ final class DynamicFieldsBuilder {
         Settings settings = context.indexSettings().getSettings();
         boolean ignoreMalformed = FieldMapper.IGNORE_MALFORMED_SETTING.get(settings);
         createDynamicField(new DateFieldMapper.Builder(name, DateFieldMapper.Resolution.MILLISECONDS,
-            dateTimeFormatter, ScriptCompiler.NONE, ignoreMalformed, context.indexSettings().getIndexVersionCreated()), context);
+            dateTimeFormatter, ScriptCompiler.NONE, ignoreMalformed, context.indexSettings().getIndexVersionCreated(),
+            context.indexSettings().getIndex().getName()), context);
     }

     void newDynamicBinaryField(DocumentParserContext context, String name) throws IOException {


@@ -106,8 +106,7 @@ public final class MappingParser {
         }
         MappingParserContext parserContext = parserContextSupplier.get();

-        RootObjectMapper rootObjectMapper
-            = rootObjectTypeParser.parse(type, mapping, parserContext).build(MapperBuilderContext.ROOT);
+        RootObjectMapper rootObjectMapper = rootObjectTypeParser.parse(type, mapping, parserContext).build(MapperBuilderContext.ROOT);

         Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = metadataMappersFunction.apply(type);
         Map<String, Object> meta = null;


@@ -565,7 +565,7 @@ public class MetadataRolloverServiceTests extends ESTestCase {
             null,
             ScriptCompiler.NONE,
             false,
-            Version.CURRENT).build(MapperBuilderContext.ROOT);
+            Version.CURRENT, "indexName").build(MapperBuilderContext.ROOT);
         ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
         Environment env = mock(Environment.class);
         when(env.sharedDataFile()).thenReturn(null);
@@ -573,7 +573,7 @@ public class MetadataRolloverServiceTests extends ESTestCase {
         when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);
         RootObjectMapper.Builder root = new RootObjectMapper.Builder("_doc");
         root.add(new DateFieldMapper.Builder(dataStream.getTimeStampField().getName(), DateFieldMapper.Resolution.MILLISECONDS,
-            DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ScriptCompiler.NONE, true, Version.CURRENT));
+            DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ScriptCompiler.NONE, true, Version.CURRENT, "indexName"));
         MetadataFieldMapper dtfm = getDataStreamTimestampFieldMapper();
         Mapping mapping = new Mapping(
             root.build(MapperBuilderContext.ROOT),


@@ -148,6 +148,17 @@ public class DateFieldMapperTests extends MapperTestCase {
         testIgnoreMalformedForValue("-522000000", "long overflow", "date_optional_time");
     }

+    public void testResolutionLossDeprecation() throws Exception {
+        DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b
+            .field("type", "date")));
+
+        ParsedDocument doc = mapper.parse(source(b -> b.field("field", "2018-10-03T14:42:44.123456+0000")));
+
+        assertWarnings("You are attempting to store a nanosecond resolution " +
+            "on a field [field] of type date on index [index]. " +
+            "The nanosecond part was lost. Use date_nanos field type.");
+    }
+
     private void testIgnoreMalformedForValue(String value, String expectedCause, String dateFormat) throws IOException {
         DocumentMapper mapper = createDocumentMapper(fieldMapping((builder)-> dateFieldMapping(builder, dateFormat)));
@@ -403,11 +414,11 @@ public class DateFieldMapperTests extends MapperTestCase {
     }

     public void testFetchMillisFromIso8601Nanos() throws IOException {
-        assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), null);
+        assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), null);
     }

     public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
-        assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), "strict_date_optional_time_nanos");
+        assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), "strict_date_optional_time_nanos");
     }

     /**
@@ -418,7 +429,8 @@ public class DateFieldMapperTests extends MapperTestCase {
      * way.
      */
     public void testFetchMillisFromRoundedNanos() throws IOException {
-        assertFetch(dateMapperService(), "field", randomDecimalNanos(MAX_ISO_DATE), null);
+        assertFetch(dateMapperService(), "field", randomDecimalMillis(MAX_ISO_DATE), null);
+        assertFetch(dateNanosMapperService(), "field", randomDecimalNanos(MAX_NANOS), null);
     }

     /**
@@ -531,7 +543,7 @@ public class DateFieldMapperTests extends MapperTestCase {
         switch (((DateFieldType) ft).resolution()) {
             case MILLISECONDS:
                 if (randomBoolean()) {
-                    return randomIs8601Nanos(MAX_ISO_DATE);
+                    return randomDecimalMillis(MAX_ISO_DATE);
                 }
                 return randomLongBetween(0, Long.MAX_VALUE);
             case NANOSECONDS:
@@ -564,6 +576,10 @@ public class DateFieldMapperTests extends MapperTestCase {
         return Long.toString(randomLongBetween(0, maxMillis)) + "." + between(0, 999999);
     }

+    private String randomDecimalMillis(long maxMillis) {
+        return Long.toString(randomLongBetween(0, maxMillis));
+    }
+
     public void testScriptAndPrecludedParameters() {
         {
             Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
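
The helper switch above matters because randomDecimalNanos produces an epoch-millis value with a random decimal fraction, which now counts as sub-millisecond input for a millisecond-resolution field, while the new randomDecimalMillis stays lossless. A sketch of the value shapes each helper emits (literals are illustrative only):

    // randomDecimalNanos-style: epoch millis plus a decimal sub-millisecond fraction,
    // e.g. "1445917245123.456789"; this would now draw the deprecation warning
    // on a `date` field.
    String decimalNanos = 1445917245123L + "." + 456789;
    // randomDecimalMillis-style: whole epoch millis, e.g. "1445917245123".
    String decimalMillis = Long.toString(1445917245123L);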


@@ -14,6 +14,7 @@ import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.test.rest.ESRestTestCase;
@@ -101,7 +102,9 @@ public class AutoCreateDataStreamIT extends ESRestTestCase {
     private Response indexDocument() throws IOException {
         final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc");
-        indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + Instant.now() + "\", \"name\": \"Kimchi\" }");
+        final Instant now = Instant.now();
+        final String time = DateFormatter.forPattern("strict_date_optional_time").format(now);
+        indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + time + "\", \"name\": \"Kimchi\" }");
         return client().performRequest(indexDocumentRequest);
     }
 }
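
The reformatting here guards against Instant.toString(), which prints whatever precision the JDK clock supplies (sub-millisecond on JDK 9+); since a data stream's @timestamp is dynamically mapped as a millisecond `date` field, the old payload could now trigger the deprecation warning. A sketch of the difference, mirroring the imports added above (printed values are illustrative):

    Instant now = Instant.now();
    // Instant.toString() keeps the clock's full precision,
    // e.g. "2021-10-18T14:52:20.123456789Z".
    String raw = now.toString();
    // strict_date_optional_time prints at millisecond precision,
    // e.g. "2021-10-18T14:52:20.123Z", which is safe for a `date` field.
    String time = DateFormatter.forPattern("strict_date_optional_time").format(now);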


@@ -12,6 +12,8 @@ import org.elasticsearch.client.Response;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.test.rest.ESRestTestCase;

 import java.time.ZoneOffset;
@@ -280,7 +282,7 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
         for (int i = 1; i <= 100; i++) {
             ZonedDateTime time = baseTime.plusHours(i);
-            String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME);
+            String formattedTime = DateFormatter.forPattern("strict_date_optional_time").format(time);
             if (i % 50 == 0) {
                 // Anomaly has 100 docs, but we don't care about the value
                 for (int j = 0; j < 100; j++) {


@@ -210,8 +210,8 @@ teardown:
             test_alias: {}
         mappings:
           properties:
-            time:
-              type: date
+            date:
+              type: date_nanos
             user:
               type: keyword
             stars:


@@ -48,8 +48,8 @@ teardown:
             test_alias: {}
         mappings:
           properties:
-            time:
-              type: date
+            date:
+              type: date_nanos
             user:
               type: keyword
             stars:
@@ -107,8 +107,8 @@ teardown:
             test_alias: {}
         mappings:
           properties:
-            time:
-              type: date
+            date:
+              type: date_nanos
             user:
               type: keyword
             stars:


@@ -34,6 +34,7 @@ import org.elasticsearch.client.transform.transforms.TransformStats;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -52,6 +53,7 @@ import java.text.DecimalFormat;
 import java.text.DecimalFormatSymbols;
 import java.time.Instant;
 import java.time.ZoneId;
+import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.Collections;
@@ -188,7 +190,7 @@ public class TransformContinuousIT extends ESRestTestCase {
         for (int i = 0; i < 100; i++) {
             dates.add(
                 // create a random date between 1/1/2001 and 1/1/2006
-                ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
+                formatTimestmap(dateType)
                     .format(Instant.ofEpochMilli(randomLongBetween(978307200000L, 1136073600000L)))
             );
         }
@@ -243,16 +245,18 @@ public class TransformContinuousIT extends ESRestTestCase {
         }

         // simulate a different timestamp that is off from the timestamp used for sync, so it can fall into the previous bucket
-        String metricDateString = ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
+        String metricDateString = formatTimestmap(dateType)
             .format(runDate.minusSeconds(randomIntBetween(0, 2)).plusNanos(randomIntBetween(0, 999999)));
         source.append("\"metric-timestamp\":\"").append(metricDateString).append("\",");

-        String dateString = ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
-            .format(runDate.plusNanos(randomIntBetween(0, 999999)));
+        final Instant timestamp = runDate.plusNanos(randomIntBetween(0, 999999));
+        String dateString = formatTimestmap(dateType)
+            .format(timestamp);
         source.append("\"timestamp\":\"").append(dateString).append("\",");

         // for data streams
-        source.append("\"@timestamp\":\"").append(dateString).append("\",");
+        // dynamic field results in a date type
+        source.append("\"@timestamp\":\"").append(formatTimestmap("date").format(timestamp)).append("\",");
         source.append("\"run\":").append(run);
         source.append("}");
@@ -295,6 +299,14 @@ public class TransformContinuousIT extends ESRestTestCase {
         }
     }

+    private DateFormatter formatTimestmap(String dateType) {
+        if (dateType.equals("date_nanos")) {
+            return DateFormatter.forPattern("strict_date_optional_time_nanos").withZone(ZoneId.of("UTC"));
+        } else {
+            return DateFormatter.forPattern("strict_date_optional_time").withZone(ZoneId.of("UTC"));
+        }
+    }
+
     /**
      * Create the transform source index with randomized settings to increase test coverage, for example
      * index sorting, triggers query optimizations.
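
To make the helper's effect concrete, a sketch of the two printers it selects between, formatting the same nanosecond-precision instant (expected outputs in comments, assuming the standard Elasticsearch formatters):

    DateFormatter millis = DateFormatter.forPattern("strict_date_optional_time").withZone(ZoneId.of("UTC"));
    DateFormatter nanos = DateFormatter.forPattern("strict_date_optional_time_nanos").withZone(ZoneId.of("UTC"));
    Instant t = Instant.parse("2015-01-01T12:10:30.123456789Z");
    System.out.println(millis.format(t)); // 2015-01-01T12:10:30.123Z
    System.out.println(nanos.format(t));  // 2015-01-01T12:10:30.123456789Z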