diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java
index b30b3ada0ab64..a364a331400a5 100644
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java
@@ -55,4 +55,3 @@ public TemporalAccessor parseJodaDate() {
return jodaFormatter.parse("1234567890");
}
}
-
diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java
index fdcb1f54ea7dd..7beddc13ca598 100644
--- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java
@@ -31,9 +31,9 @@
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
@@ -107,7 +107,7 @@ public String typeName() {
}
@Override
- public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
throw new UnsupportedOperationException(
"Field [" + name() + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations");
}
diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java
index d3719ec884fa1..38d635ab3939f 100644
--- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java
@@ -59,10 +59,10 @@
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.math.BigDecimal;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@@ -301,7 +301,7 @@ public Object valueForDisplay(Object value) {
}
@Override
- public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
+ "] does not support custom time zones");
diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java
index 2eb360255d070..f7288d5039390 100644
--- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java
@@ -31,9 +31,9 @@
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
@@ -107,7 +107,7 @@ public String typeName() {
}
@Override
- public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
throw new UnsupportedOperationException(
"Field [" + name() + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations");
}
diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java
index d4cc9ee9d6e89..a228283527d66 100644
--- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java
+++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java
@@ -46,9 +46,9 @@
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -208,7 +208,7 @@ public BytesRef parseBytesRef(String value) {
};
@Override
- public DocValueFormat docValueFormat(final String format, final DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(final String format, final ZoneId timeZone) {
return COLLATE_FORMAT;
}
}
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml
index 0f8b5517dd4d2..ea0984ef3bcbf 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml
@@ -9,7 +9,7 @@
index: timetest
body:
mappings:
- test: { "properties": { "my_time": {"type": "date"}}}
+ test: { "properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}}
- do:
ingest.put_pipeline:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/180_local_dependent_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/180_locale_dependent_mapping.yml
similarity index 100%
rename from rest-api-spec/src/main/resources/rest-api-spec/test/search/180_local_dependent_mapping.yml
rename to rest-api-spec/src/main/resources/rest-api-spec/test/search/180_locale_dependent_mapping.yml
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
index 04e372a5f91de..ce93b27d770c1 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java
@@ -29,7 +29,6 @@
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@@ -368,7 +367,7 @@ public static final class Tombstone implements ToXContentObject, Writeable {
TOMBSTONE_PARSER.declareString((b, s) -> {}, new ParseField(DELETE_DATE_KEY));
}
- static final DateFormatter FORMATTER = DateFormatters.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC);
+ static final DateFormatter FORMATTER = DateFormatter.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC);
static ContextParser<Void, Tombstone> getParser() {
return (parser, context) -> TOMBSTONE_PARSER.apply(parser, null).build();
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
index f60866383107a..050d97ba54cf0 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
@@ -28,8 +28,8 @@
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
+import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
@@ -819,7 +819,7 @@ private static List<String> resolveEmptyOrTrivialWildcard(IndicesOptions options
static final class DateMathExpressionResolver implements ExpressionResolver {
- private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatters.forPattern("uuuu.MM.dd");
+ private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd");
private static final String EXPRESSION_LEFT_BOUND = "<";
private static final String EXPRESSION_RIGHT_BOUND = ">";
private static final char LEFT_BOUND = '{';
@@ -912,18 +912,19 @@ String resolveExpression(String expression, final Context context) {
int formatPatternTimeZoneSeparatorIndex = patternAndTZid.indexOf(TIME_ZONE_BOUND);
if (formatPatternTimeZoneSeparatorIndex != -1) {
dateFormatterPattern = patternAndTZid.substring(0, formatPatternTimeZoneSeparatorIndex);
- timeZone = ZoneId.of(patternAndTZid.substring(formatPatternTimeZoneSeparatorIndex + 1));
+ timeZone = DateUtils.of(patternAndTZid.substring(formatPatternTimeZoneSeparatorIndex + 1));
} else {
dateFormatterPattern = patternAndTZid;
timeZone = ZoneOffset.UTC;
}
- dateFormatter = DateFormatters.forPattern(dateFormatterPattern);
+ dateFormatter = DateFormatter.forPattern(dateFormatterPattern);
}
+
DateFormatter formatter = dateFormatter.withZone(timeZone);
DateMathParser dateMathParser = formatter.toDateMathParser();
- long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);
+ Instant instant = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);
- String time = formatter.format(Instant.ofEpochMilli(millis));
+ String time = formatter.format(instant);
beforePlaceHolderSb.append(time);
inPlaceHolderSb = new StringBuilder();
inPlaceHolder = false;
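Reviewer note: a minimal sketch (not part of this patch) of what the resolver now does for a date math expression such as <logstash-{now/d{uuuu.MM.dd|+02:00}}>, using only DateFormatter.forPattern, DateUtils.of and the Instant-returning parser shown above; the surrounding index-name plumbing is elided.

import java.time.Instant;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;

public class DateMathIndexNameSketch {
    public static void main(String[] args) {
        // pattern and zone come from the expression <logstash-{now/d{uuuu.MM.dd|+02:00}}>
        DateFormatter formatter = DateFormatter.forPattern("uuuu.MM.dd").withZone(DateUtils.of("+02:00"));
        DateMathParser mathParser = formatter.toDateMathParser();
        // the parser now hands back an Instant instead of epoch millis
        Instant resolved = mathParser.parse("now/d", System::currentTimeMillis, false, DateUtils.of("+02:00"));
        String indexName = "logstash-" + formatter.format(resolved);
        System.out.println(indexName);
    }
}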
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
index 21885d1788c7e..f8afbeb449361 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
@@ -32,7 +32,6 @@
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -48,7 +47,7 @@
*/
public final class UnassignedInfo implements ToXContentFragment, Writeable {
- public static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC);
+ public static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC);
public static final Setting<TimeValue> INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING =
Setting.positiveTimeSetting("index.unassigned.node_left.delayed_timeout", TimeValue.timeValueMinutes(1), Property.Dynamic,
diff --git a/server/src/main/java/org/elasticsearch/common/Rounding.java b/server/src/main/java/org/elasticsearch/common/Rounding.java
index 593964f61e93f..dab29c88634e9 100644
--- a/server/src/main/java/org/elasticsearch/common/Rounding.java
+++ b/server/src/main/java/org/elasticsearch/common/Rounding.java
@@ -19,9 +19,11 @@
package org.elasticsearch.common;
import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.unit.TimeValue;
import java.io.IOException;
@@ -188,7 +190,7 @@ static class TimeUnitRounding extends Rounding {
TimeUnitRounding(StreamInput in) throws IOException {
unit = DateTimeUnit.resolve(in.readByte());
- timeZone = ZoneId.of(in.readString());
+ timeZone = DateUtils.of(in.readString());
unitRoundsToMidnight = unit.getField().getBaseUnit().getDuration().toMillis() > 60L * 60L * 1000L;
}
@@ -367,8 +369,11 @@ public long nextRoundingValue(long utcMillis) {
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeByte(unit.getId());
- String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId(); // stay joda compatible
- out.writeString(tz);
+ if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ out.writeString(timeZone.getId());
+ } else {
+ out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID());
+ }
}
@Override
@@ -417,7 +422,7 @@ public String toString() {
TimeIntervalRounding(StreamInput in) throws IOException {
interval = in.readVLong();
- timeZone = ZoneId.of(in.readString());
+ timeZone = DateUtils.of(in.readString());
}
@Override
@@ -490,8 +495,11 @@ public long nextRoundingValue(long time) {
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeVLong(interval);
- String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId(); // stay joda compatible
- out.writeString(tz);
+ if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+ out.writeString(timeZone.getId());
+ } else {
+ out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID());
+ }
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/common/Table.java b/server/src/main/java/org/elasticsearch/common/Table.java
index a41fd267329ff..d097783a838f3 100644
--- a/server/src/main/java/org/elasticsearch/common/Table.java
+++ b/server/src/main/java/org/elasticsearch/common/Table.java
@@ -20,7 +20,6 @@
package org.elasticsearch.common;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import java.time.Instant;
import java.time.ZoneOffset;
@@ -85,7 +84,7 @@ public Table endHeaders() {
return this;
}
- private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
+ private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
public Table startRow() {
if (headers.isEmpty()) {
diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
index fd9ffdfd31d16..7759e13e536b7 100644
--- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
+++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
@@ -653,6 +653,23 @@ public DateTimeZone readOptionalTimeZone() throws IOException {
return null;
}
+ /**
+ * Read a {@linkplain ZoneId}.
+ */
+ public ZoneId readZoneId() throws IOException {
+ return ZoneId.of(readString());
+ }
+
+ /**
+ * Read an optional {@linkplain ZoneId}.
+ */
+ public ZoneId readOptionalZoneId() throws IOException {
+ if (readBoolean()) {
+ return ZoneId.of(readString());
+ }
+ return null;
+ }
+
public int[] readIntArray() throws IOException {
int length = readArraySize();
int[] values = new int[length];
diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
index 8131335602693..699713cb0f836 100644
--- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
+++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
@@ -55,6 +55,7 @@
import java.nio.file.FileSystemLoopException;
import java.nio.file.NoSuchFileException;
import java.nio.file.NotDirectoryException;
+import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Collections;
@@ -677,7 +678,6 @@ public final <K, V> void writeMap(final Map<K, V> map, final Writer<K> keyWriter
writers.put(ZonedDateTime.class, (o, v) -> {
o.writeByte((byte) 23);
final ZonedDateTime zonedDateTime = (ZonedDateTime) v;
- zonedDateTime.getZone().getId();
o.writeString(zonedDateTime.getZone().getId());
o.writeLong(zonedDateTime.toInstant().toEpochMilli());
});
@@ -988,6 +988,13 @@ public void writeTimeZone(DateTimeZone timeZone) throws IOException {
writeString(timeZone.getID());
}
+ /**
+ * Write a {@linkplain ZoneId} to the stream.
+ */
+ public void writeZoneId(ZoneId timeZone) throws IOException {
+ writeString(timeZone.getId());
+ }
+
/**
* Write an optional {@linkplain DateTimeZone} to the stream.
*/
@@ -1000,6 +1007,18 @@ public void writeOptionalTimeZone(@Nullable DateTimeZone timeZone) throws IOExce
}
}
+ /**
+ * Write an optional {@linkplain ZoneId} to the stream.
+ */
+ public void writeOptionalZoneId(@Nullable ZoneId timeZone) throws IOException {
+ if (timeZone == null) {
+ writeBoolean(false);
+ } else {
+ writeBoolean(true);
+ writeZoneId(timeZone);
+ }
+ }
+
/**
* Writes a list of {@link Streamable} objects
*/
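Reviewer note: an editor's sketch (not part of the patch) of how the new ZoneId helpers round-trip, assuming BytesStreamOutput/StreamInput are used the same way as for the existing DateTimeZone methods.

import java.time.ZoneId;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

public class ZoneIdStreamSketch {
    public static void main(String[] args) throws Exception {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeZoneId(ZoneId.of("Europe/Berlin"));
            out.writeOptionalZoneId(null);                // encoded as a single 'false' boolean
            try (StreamInput in = out.bytes().streamInput()) {
                ZoneId zone = in.readZoneId();            // Europe/Berlin
                ZoneId missing = in.readOptionalZoneId(); // null
                System.out.println(zone + " " + missing);
            }
        }
    }
}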
diff --git a/server/src/main/java/org/elasticsearch/common/joda/JodaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/joda/JodaDateFormatter.java
index 5db95b12bb437..706e995530962 100644
--- a/server/src/main/java/org/elasticsearch/common/joda/JodaDateFormatter.java
+++ b/server/src/main/java/org/elasticsearch/common/joda/JodaDateFormatter.java
@@ -31,12 +31,12 @@
import java.time.ZonedDateTime;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
+import java.util.Objects;
public class JodaDateFormatter implements DateFormatter {
- final String pattern;
+ final String pattern;
final DateTimeFormatter parser;
-
final DateTimeFormatter printer;
public JodaDateFormatter(String pattern, DateTimeFormatter parser, DateTimeFormatter printer) {
@@ -108,4 +108,21 @@ public ZoneId zone() {
public DateMathParser toDateMathParser() {
return new JodaDateMathParser(this);
}
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(locale(), zone(), pattern());
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || obj.getClass().equals(this.getClass()) == false) {
+ return false;
+ }
+ JodaDateFormatter other = (JodaDateFormatter) obj;
+
+ return Objects.equals(pattern(), other.pattern()) &&
+ Objects.equals(locale(), other.locale()) &&
+ Objects.equals(zone(), other.zone());
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java
index b86af7a75a55f..b7522c6a3233e 100644
--- a/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java
+++ b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java
@@ -26,6 +26,7 @@
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormatter;
+import java.time.Instant;
import java.time.ZoneId;
import java.util.Objects;
import java.util.function.LongSupplier;
@@ -50,7 +51,7 @@ public JodaDateMathParser(JodaDateFormatter dateTimeFormatter) {
// if it has been used. For instance, the request cache does not cache requests that make
// use of `now`.
@Override
- public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
+ public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
final DateTimeZone timeZone = tz == null ? null : DateUtils.zoneIdToDateTimeZone(tz);
long time;
String mathString;
@@ -64,13 +65,13 @@ public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
} else {
int index = text.indexOf("||");
if (index == -1) {
- return parseDateTime(text, timeZone, roundUp);
+ return Instant.ofEpochMilli(parseDateTime(text, timeZone, roundUp));
}
time = parseDateTime(text.substring(0, index), timeZone, false);
mathString = text.substring(index + 2);
}
- return parseMath(mathString, time, roundUp, timeZone);
+ return Instant.ofEpochMilli(parseMath(mathString, time, roundUp, timeZone));
}
private long parseMath(String mathString, long time, boolean roundUp, DateTimeZone timeZone) throws ElasticsearchParseException {
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java
index 8d83aa30b3587..aeea14ee1f011 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java
@@ -20,7 +20,6 @@
package org.elasticsearch.common.time;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.joda.Joda;
import org.joda.time.DateTime;
import java.time.Instant;
@@ -87,7 +86,8 @@ default DateTime parseJoda(String input) {
* Return the given millis-since-epoch formatted with this format.
*/
default String formatMillis(long millis) {
- return format(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC));
+ ZoneId zone = zone() != null ? zone() : ZoneOffset.UTC;
+ return format(Instant.ofEpochMilli(millis).atZone(zone));
}
/**
@@ -121,7 +121,9 @@ default String formatJoda(DateTime dateTime) {
ZoneId zone();
/**
- * Return a {@link DateMathParser} built from this formatter.
+ * Create a DateMathParser from the existing formatter
+ *
+ * @return The DateMathParser object
*/
DateMathParser toDateMathParser();
@@ -129,12 +131,11 @@ static DateFormatter forPattern(String input) {
if (Strings.hasLength(input) == false) {
throw new IllegalArgumentException("No date pattern provided");
}
- if (input.startsWith("8") == false) {
- return Joda.forPattern(input);
- }
- // dates starting with 8 will not be using joda but java time formatters
- input = input.substring(1);
+ // support the 6.x BWC compatible way of parsing java 8 dates
+ if (input.startsWith("8")) {
+ input = input.substring(1);
+ }
List<DateFormatter> formatters = new ArrayList<>();
for (String pattern : Strings.delimitedListToStringArray(input, "||")) {
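Reviewer note: a hedged sketch of the new pattern handling. The leading "8" (the 6.x marker for java-time formats) is now simply stripped, and "||"-separated patterns still resolve to a single DateFormatter; the merging itself lives in DateFormatters/JavaDateFormatter.

import java.time.temporal.TemporalAccessor;
import org.elasticsearch.common.time.DateFormatter;

public class ForPatternSketch {
    public static void main(String[] args) {
        DateFormatter bwc = DateFormatter.forPattern("8strict_date_optional_time");
        DateFormatter composite = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");
        TemporalAccessor fromIso = bwc.parse("2019-01-22T10:15:30Z");
        TemporalAccessor fromMillis = composite.parse("1548152130000");
        System.out.println(fromIso + " " + fromMillis);
    }
}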
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
index de75356a58995..2e3c2953ec375 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
@@ -76,28 +76,53 @@ public class DateFormatters {
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER = new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral('T')
- .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
- .appendFraction(NANO_OF_SECOND, 3, 9, true)
+ .optionalStart()
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendFraction(NANO_OF_SECOND, 3, 3, true)
+ .optionalEnd()
+ .optionalEnd()
.optionalStart()
.appendZoneOrOffsetId()
.optionalEnd()
+ .optionalEnd()
+ .optionalEnd()
.toFormatter(Locale.ROOT);
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER = new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.optionalStart()
.appendLiteral('T')
- .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendFraction(NANO_OF_SECOND, 3, 3, true)
.optionalEnd()
.optionalStart()
+ .appendFraction(NANO_OF_SECOND, 3, 9, true)
+ .optionalEnd()
+ .optionalEnd()
+ .optionalStart()
.appendZoneOrOffsetId()
.optionalEnd()
.optionalStart()
.append(TIME_ZONE_FORMATTER_NO_COLON)
.optionalEnd()
.optionalEnd()
+ .optionalEnd()
+ .optionalEnd()
.toFormatter(Locale.ROOT);
/**
@@ -123,11 +148,33 @@ public class DateFormatters {
.optionalEnd()
.toFormatter(Locale.ROOT);
+ private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS = new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .appendLiteral('T')
+ .optionalStart()
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendLiteral(':')
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+ .optionalStart()
+ .appendFraction(NANO_OF_SECOND, 3, 9, true)
+ .optionalEnd()
+ .optionalEnd()
+ .optionalStart()
+ .appendZoneOrOffsetId()
+ .optionalEnd()
+ .optionalEnd()
+ .optionalEnd()
+ .toFormatter(Locale.ROOT);
+
/**
* Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution.
*/
private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter("strict_date_optional_time_nanos",
- STRICT_DATE_OPTIONAL_TIME_PRINTER, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS);
+ STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS);
/////////////////////////////////////////
//
@@ -329,31 +376,32 @@ public class DateFormatters {
* Returns a basic formatter that combines a basic weekyear date and time
* without millis, separated by a 'T' (xxxx'W'wwe'T'HHmmssX).
*/
- private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_basic_week_date_no_millis",
- new DateTimeFormatterBuilder()
- .append(STRICT_BASIC_WEEK_DATE_PRINTER)
- .appendLiteral("T")
- .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendZoneOrOffsetId()
- .toFormatter(Locale.ROOT),
- new DateTimeFormatterBuilder()
- .append(STRICT_BASIC_WEEK_DATE_PRINTER)
- .appendLiteral("T")
- .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendZoneOrOffsetId()
- .toFormatter(Locale.ROOT),
- new DateTimeFormatterBuilder()
- .append(STRICT_BASIC_WEEK_DATE_PRINTER)
- .appendLiteral("T")
- .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
- .append(TIME_ZONE_FORMATTER_NO_COLON)
- .toFormatter(Locale.ROOT)
+ private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS =
+ new JavaDateFormatter("strict_basic_week_date_time_no_millis",
+ new DateTimeFormatterBuilder()
+ .append(STRICT_BASIC_WEEK_DATE_PRINTER)
+ .appendLiteral("T")
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendZoneOrOffsetId()
+ .toFormatter(Locale.ROOT),
+ new DateTimeFormatterBuilder()
+ .append(STRICT_BASIC_WEEK_DATE_PRINTER)
+ .appendLiteral("T")
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendZoneOrOffsetId()
+ .toFormatter(Locale.ROOT),
+ new DateTimeFormatterBuilder()
+ .append(STRICT_BASIC_WEEK_DATE_PRINTER)
+ .appendLiteral("T")
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+ .append(TIME_ZONE_FORMATTER_NO_COLON)
+ .toFormatter(Locale.ROOT)
);
/*
@@ -389,7 +437,7 @@ public class DateFormatters {
* An ISO date formatter that formats or parses a date without an offset, such as '2011-12-03'.
*/
private static final DateFormatter STRICT_DATE = new JavaDateFormatter("strict_date",
- DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT));
+ DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT).withLocale(Locale.ROOT));
/*
* A date formatter that formats or parses a date plus an hour without an offset, such as '2011-12-03T01'.
@@ -514,7 +562,9 @@ public class DateFormatters {
new JavaDateFormatter("strict_hour_minute_second_millis",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
- private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS;
+ private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION =
+ new JavaDateFormatter("strict_hour_minute_second_fraction",
+ STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
/*
* Returns a formatter that combines a full date, two digit hour of day,
@@ -537,7 +587,21 @@ public class DateFormatters {
.toFormatter(Locale.ROOT)
);
- private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION;
+ private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter(
+ "strict_date_hour_minute_second_millis",
+ new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .appendLiteral("T")
+ .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
+ .toFormatter(Locale.ROOT),
+ new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .appendLiteral("T")
+ .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ // this parser is lenient as well, to retain Joda-Time based bwc compatibility
+ .appendFraction(NANO_OF_SECOND, 1, 3, true)
+ .toFormatter(Locale.ROOT)
+ );
/*
* Returns a formatter for a two digit hour of day. (HH)
@@ -782,14 +846,12 @@ public class DateFormatters {
private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder()
.appendValue(ChronoField.YEAR, 1, 5, SignStyle.NORMAL)
- .optionalStart()
.appendLiteral('-')
.appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral('-')
.appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE)
.optionalEnd()
- .optionalEnd()
.toFormatter(Locale.ROOT);
private static final DateTimeFormatter HOUR_MINUTE_FORMATTER = new DateTimeFormatterBuilder()
@@ -928,7 +990,17 @@ public class DateFormatters {
.append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER)
.toFormatter(Locale.ROOT));
- private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = DATE_HOUR_MINUTE_SECOND_MILLIS;
+ private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter("date_hour_minute_second_fraction",
+ new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .appendLiteral("T")
+ .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
+ .toFormatter(Locale.ROOT),
+ new DateTimeFormatterBuilder()
+ .append(DATE_FORMATTER)
+ .appendLiteral("T")
+ .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER)
+ .toFormatter(Locale.ROOT));
/*
* Returns a formatter that combines a full date, two digit hour of day,
@@ -1033,6 +1105,9 @@ public class DateFormatters {
private static final DateFormatter HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter("hour_minute_second_millis",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
+ private static final DateFormatter HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter("hour_minute_second_fraction",
+ STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
+
/*
* Returns a formatter for a two digit hour of day and two digit minute of
* hour. (HH:mm)
@@ -1272,7 +1347,7 @@ public class DateFormatters {
//
/////////////////////////////////////////
- public static DateFormatter forPattern(String input) {
+ static DateFormatter forPattern(String input) {
if (Strings.hasLength(input)) {
input = input.trim();
}
@@ -1331,7 +1406,7 @@ public static DateFormatter forPattern(String input) {
} else if ("hourMinuteSecond".equals(input) || "hour_minute_second".equals(input)) {
return HOUR_MINUTE_SECOND;
} else if ("hourMinuteSecondFraction".equals(input) || "hour_minute_second_fraction".equals(input)) {
- return HOUR_MINUTE_SECOND_MILLIS;
+ return HOUR_MINUTE_SECOND_FRACTION;
} else if ("hourMinuteSecondMillis".equals(input) || "hour_minute_second_millis".equals(input)) {
return HOUR_MINUTE_SECOND_MILLIS;
} else if ("ordinalDate".equals(input) || "ordinal_date".equals(input)) {
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java
index 1e997cce23be8..3ba392822ca0c 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java
@@ -21,6 +21,7 @@
import org.joda.time.DateTimeZone;
+import java.time.Instant;
import java.time.ZoneId;
import java.util.function.LongSupplier;
@@ -32,7 +33,7 @@ public interface DateMathParser {
/**
* Parse a date math expression without timezone info and rounding down.
*/
- default long parse(String text, LongSupplier now) {
+ default Instant parse(String text, LongSupplier now) {
return parse(text, now, false, (ZoneId) null);
}
@@ -42,7 +43,7 @@ default long parse(String text, LongSupplier now) {
// exists for backcompat, do not use!
@Deprecated
- default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) {
+ default Instant parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) {
return parse(text, now, roundUp, tz == null ? null : ZoneId.of(tz.getID()));
}
@@ -68,7 +69,7 @@ default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone
* @param now a supplier to retrieve the current date in milliseconds, if needed for additions
* @param roundUp should the result be rounded up
* @param tz an optional timezone that should be applied before returning the milliseconds since the epoch
- * @return the parsed date in milliseconds since the epoch
+ * @return the parsed date as an {@link Instant}
*/
- long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz);
+ Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz);
}
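Reviewer note: a minimal sketch of the new calling convention; callers now receive an Instant and convert to epoch millis themselves where needed (as DateFieldMapper does later in this patch).

import java.time.Instant;
import java.time.ZoneOffset;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;

public class DateMathParserSketch {
    public static void main(String[] args) {
        DateMathParser parser = DateFormatter.forPattern("strict_date_optional_time").toDateMathParser();
        Instant instant = parser.parse("2019-01-01||+1M/d", () -> 0L, false, ZoneOffset.UTC);
        long millis = instant.toEpochMilli(); // 2019-02-01T00:00:00Z as epoch millis
        System.out.println(millis);
    }
}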
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java
index c46cee881a1a0..e913a69dca776 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java
@@ -65,12 +65,16 @@ public static ZoneId dateTimeZoneToZoneId(DateTimeZone timeZone) {
return ZoneOffset.UTC;
}
- String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(timeZone.getID());
+ return of(timeZone.getID());
+ }
+
+ public static ZoneId of(String zoneId) {
+ String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(zoneId);
if (deprecatedId != null) {
deprecationLogger.deprecatedAndMaybeLog("timezone",
- "Use of short timezone id " + timeZone.getID() + " is deprecated. Use " + deprecatedId + " instead");
+ "Use of short timezone id " + zoneId + " is deprecated. Use " + deprecatedId + " instead");
return ZoneId.of(deprecatedId);
}
- return ZoneId.of(timeZone.getID());
+ return ZoneId.of(zoneId).normalized();
}
}
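Reviewer note: a sketch of the new DateUtils.of(String), which keeps the deprecated-short-id handling that dateTimeZoneToZoneId already had. The "CTT" to Asia/Shanghai mapping is an assumption based on the existing DEPRECATED_SHORT_TIMEZONES table.

import java.time.ZoneId;
import org.elasticsearch.common.time.DateUtils;

public class DateUtilsOfSketch {
    public static void main(String[] args) {
        ZoneId berlin = DateUtils.of("Europe/Berlin"); // passed through ZoneId.of(...).normalized()
        ZoneId shanghai = DateUtils.of("CTT");         // deprecated short id, logs a deprecation warning
        System.out.println(berlin + " " + shanghai);
    }
}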
diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java
index 7e0f17c5f6d9c..c824a7c7e7c35 100644
--- a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java
+++ b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java
@@ -19,6 +19,8 @@
package org.elasticsearch.common.time;
+import org.elasticsearch.bootstrap.JavaVersion;
+
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.ResolverStyle;
@@ -99,6 +101,10 @@ public TemporalAccessor resolve(Map<TemporalField, Long> fieldValues,
}
fieldValues.put(ChronoField.INSTANT_SECONDS, seconds);
fieldValues.put(ChronoField.NANO_OF_SECOND, nanos);
+ // if there is already a milli of second, we need to overwrite it
+ if (fieldValues.containsKey(ChronoField.MILLI_OF_SECOND)) {
+ fieldValues.put(ChronoField.MILLI_OF_SECOND, nanos / 1_000_000);
+ }
return null;
}
};
@@ -106,7 +112,8 @@ public TemporalAccessor resolve(Map fieldValues,
private static final EpochField NANOS_OF_MILLI = new EpochField(ChronoUnit.NANOS, ChronoUnit.MILLIS, ValueRange.of(0, 999_999)) {
@Override
public boolean isSupportedBy(TemporalAccessor temporal) {
- return temporal.isSupported(ChronoField.NANO_OF_SECOND) && temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000 != 0;
+ return temporal.isSupported(ChronoField.INSTANT_SECONDS) && temporal.isSupported(ChronoField.NANO_OF_SECOND)
+ && temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000 != 0;
}
@Override
public long getFrom(TemporalAccessor temporal) {
@@ -156,9 +163,20 @@ public long getFrom(TemporalAccessor temporal) {
builder -> builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L),
SECONDS_FORMATTER1, SECONDS_FORMATTER2, SECONDS_FORMATTER3);
- static final DateFormatter MILLIS_FORMATTER = new JavaDateFormatter("epoch_millis", MILLISECONDS_FORMATTER3,
- builder -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L),
- MILLISECONDS_FORMATTER1, MILLISECONDS_FORMATTER2, MILLISECONDS_FORMATTER3);
+ static final DateFormatter MILLIS_FORMATTER = getEpochMillisFormatter();
+
+ private static DateFormatter getEpochMillisFormatter() {
+ // the third formatter fails under java 8 as a printer, so fall back to this one
+ final DateTimeFormatter printer;
+ if (JavaVersion.current().getVersion().get(0) == 8) {
+ printer = MILLISECONDS_FORMATTER1;
+ } else {
+ printer = MILLISECONDS_FORMATTER3;
+ }
+ return new JavaDateFormatter("epoch_millis", printer,
+ builder -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L),
+ MILLISECONDS_FORMATTER1, MILLISECONDS_FORMATTER2, MILLISECONDS_FORMATTER3);
+ }
private abstract static class EpochField implements TemporalField {
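Reviewer note: a hedged sketch of the expected epoch_millis behaviour after this change; it should now print on Java 8 as well, and the round trip goes through DateFormatters.toZonedDateTime as the mappers do elsewhere in this patch.

import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;

public class EpochMillisSketch {
    public static void main(String[] args) {
        DateFormatter millis = DateFormatter.forPattern("epoch_millis");
        String printed = millis.formatMillis(1234567890123L); // "1234567890123"
        long parsed = DateFormatters.toZonedDateTime(millis.parse(printed)).toInstant().toEpochMilli();
        System.out.println(printed + " -> " + parsed);
    }
}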
diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java
index 20ef593a32610..bcdf9cbdcf674 100644
--- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java
+++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java
@@ -24,6 +24,7 @@
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
+import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalField;
@@ -76,6 +77,8 @@ private JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeForm
if (distinctLocales > 1) {
throw new IllegalArgumentException("formatters must have the same locale");
}
+ this.printer = printer;
+ this.format = format;
if (parsers.length == 0) {
this.parser = printer;
} else if (parsers.length == 1) {
@@ -87,11 +90,11 @@ private JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeForm
}
this.parser = builder.toFormatter(Locale.ROOT);
}
- this.format = format;
- this.printer = printer;
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
- builder.append(this.parser);
+ if (format.contains("||") == false) {
+ builder.append(this.parser);
+ }
roundupParserConsumer.accept(builder);
DateTimeFormatter roundupFormatter = builder.toFormatter(parser.getLocale());
if (printer.getZone() != null) {
@@ -117,7 +120,12 @@ public TemporalAccessor parse(String input) {
if (Strings.isNullOrEmpty(input)) {
throw new IllegalArgumentException("cannot parse empty date");
}
- return parser.parse(input);
+
+ try {
+ return parser.parse(input);
+ } catch (DateTimeParseException e) {
+ throw new IllegalArgumentException("failed to parse date field [" + input + "] with format [" + format + "]", e);
+ }
}
@Override
@@ -162,7 +170,7 @@ public ZoneId zone() {
@Override
public DateMathParser toDateMathParser() {
- return new JavaDateMathParser(parser, roundupParser);
+ return new JavaDateMathParser(format, parser, roundupParser);
}
@Override
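Reviewer note: a small sketch of the improved failure mode added above; parse errors now surface as IllegalArgumentException and mention both the offending input and the format name.

import org.elasticsearch.common.time.DateFormatter;

public class ParseErrorSketch {
    public static void main(String[] args) {
        DateFormatter formatter = DateFormatter.forPattern("strict_date_time");
        try {
            formatter.parse("not-a-date");
        } catch (IllegalArgumentException e) {
            // e.g. "failed to parse date field [not-a-date] with format [strict_date_time]"
            System.out.println(e.getMessage());
        }
    }
}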
diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java
index 3c9a1615a6c01..9ee390ba391a7 100644
--- a/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java
+++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java
@@ -22,7 +22,6 @@
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
-import java.time.DateTimeException;
import java.time.DayOfWeek;
import java.time.Instant;
import java.time.LocalTime;
@@ -30,6 +29,7 @@
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalAdjusters;
@@ -48,20 +48,23 @@ public class JavaDateMathParser implements DateMathParser {
private final DateTimeFormatter formatter;
private final DateTimeFormatter roundUpFormatter;
+ private final String format;
- public JavaDateMathParser(DateTimeFormatter formatter, DateTimeFormatter roundUpFormatter) {
+ JavaDateMathParser(String format, DateTimeFormatter formatter, DateTimeFormatter roundUpFormatter) {
+ this.format = format;
Objects.requireNonNull(formatter);
this.formatter = formatter;
this.roundUpFormatter = roundUpFormatter;
}
@Override
- public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) {
- long time;
+ public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) {
+ Instant time;
String mathString;
if (text.startsWith("now")) {
try {
- time = now.getAsLong();
+ // TODO only millisecond granularity here!
+ time = Instant.ofEpochMilli(now.getAsLong());
} catch (Exception e) {
throw new ElasticsearchParseException("could not read the current timestamp", e);
}
@@ -78,12 +81,12 @@ public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZon
return parseMath(mathString, time, roundUp, timeZone);
}
- private long parseMath(final String mathString, final long time, final boolean roundUp,
+ private Instant parseMath(final String mathString, final Instant time, final boolean roundUp,
ZoneId timeZone) throws ElasticsearchParseException {
if (timeZone == null) {
timeZone = ZoneOffset.UTC;
}
- ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone);
+ ZonedDateTime dateTime = ZonedDateTime.ofInstant(time, timeZone);
for (int i = 0; i < mathString.length(); ) {
char c = mathString.charAt(i++);
final boolean round;
@@ -204,18 +207,18 @@ private long parseMath(final String mathString, final long time, final boolean r
dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit());
}
}
- return dateTime.toInstant().toEpochMilli();
+ return dateTime.toInstant();
}
- private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
+ private Instant parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
if (Strings.isNullOrEmpty(value)) {
- throw new IllegalArgumentException("cannot parse empty date");
+ throw new ElasticsearchParseException("cannot parse empty date");
}
DateTimeFormatter formatter = roundUpIfNoTime ? this.roundUpFormatter : this.formatter;
try {
if (timeZone == null) {
- return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli();
+ return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant();
} else {
TemporalAccessor accessor = formatter.parse(value);
ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor);
@@ -223,10 +226,11 @@ private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTim
timeZone = zoneId;
}
- return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli();
+ return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant();
}
- } catch (IllegalArgumentException | DateTimeException e) {
- throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage());
+ } catch (DateTimeParseException e) {
+ throw new ElasticsearchParseException("failed to parse date field [{}] with format [{}]: [{}]",
+ e, value, format, e.getMessage());
}
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java
index f32ba715a8068..3f731b73dc870 100644
--- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java
+++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java
@@ -22,7 +22,6 @@
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.script.JodaCompatibleZonedDateTime;
@@ -65,9 +64,9 @@
public class XContentElasticsearchExtension implements XContentBuilderExtension {
public static final DateTimeFormatter DEFAULT_DATE_PRINTER = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
- public static final DateFormatter DEFAULT_FORMATTER = DateFormatters.forPattern("strict_date_optional_time_nanos");
- public static final DateFormatter LOCAL_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSS");
- public static final DateFormatter OFFSET_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSSZZZZZ");
+ public static final DateFormatter DEFAULT_FORMATTER = DateFormatter.forPattern("strict_date_optional_time_nanos");
+ public static final DateFormatter LOCAL_TIME_FORMATTER = DateFormatter.forPattern("HH:mm:ss.SSS");
+ public static final DateFormatter OFFSET_TIME_FORMATTER = DateFormatter.forPattern("HH:mm:ss.SSSZZZZZ");
@Override
public Map<Class<?>, XContentBuilder.Writer> getXContentWriters() {
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
index 69b6a6e04a936..7a5bd97770297 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectArrayList;
-
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@@ -41,9 +40,9 @@
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Base64;
import java.util.List;
import java.util.Map;
@@ -108,7 +107,7 @@ public String typeName() {
}
@Override
- public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
return DocValueFormat.BINARY;
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java
index 9e0b9f62acbe7..caf8baac24da1 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java
@@ -40,9 +40,9 @@
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -190,7 +190,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
}
@Override
- public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java
index 1e17aab31605b..0dcf52d5e54f2 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java
@@ -33,13 +33,15 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
-import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -49,18 +51,17 @@
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
-import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
-
-/** A {@link FieldMapper} for ip addresses. */
+/** A {@link FieldMapper} for dates. */
public class DateFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "date";
@@ -73,8 +74,8 @@ public static class Defaults {
public static class Builder extends FieldMapper.Builder<Builder, DateFieldMapper> {
private Boolean ignoreMalformed;
+ private Explicit<String> format = new Explicit<>(DEFAULT_DATE_TIME_FORMATTER.pattern(), false);
private Locale locale;
- private boolean dateTimeFormatterSet = false;
public Builder(String name) {
super(name, new DateFieldType(), new DateFieldType());
@@ -102,27 +103,37 @@ protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
return Defaults.IGNORE_MALFORMED;
}
- /** Whether an explicit format for this date field has been set already. */
- public boolean isDateTimeFormatterSet() {
- return dateTimeFormatterSet;
+ public Builder locale(Locale locale) {
+ this.locale = locale;
+ return this;
+ }
+
+ public Locale locale() {
+ return locale;
}
- public Builder dateTimeFormatter(DateFormatter dateTimeFormatter) {
- fieldType().setDateTimeFormatter(dateTimeFormatter);
- dateTimeFormatterSet = true;
+ public String format() {
+ return format.value();
+ }
+
+ public Builder format(String format) {
+ this.format = new Explicit<>(format, true);
return this;
}
- public void locale(Locale locale) {
- this.locale = locale;
+ public boolean isFormatterSet() {
+ return format.explicit();
}
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
+ String pattern = this.format.value();
DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
- if (!locale.equals(dateTimeFormatter.locale())) {
- fieldType().setDateTimeFormatter(dateTimeFormatter.withLocale(locale));
+
+ boolean hasPatternChanged = Strings.hasLength(pattern) && Objects.equals(pattern, dateTimeFormatter.pattern()) == false;
+ if (hasPatternChanged || Objects.equals(builder.locale, dateTimeFormatter.locale()) == false) {
+ fieldType().setDateTimeFormatter(DateFormatter.forPattern(pattern).withLocale(locale));
}
}
@@ -160,7 +171,7 @@ public Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserCo
builder.locale(LocaleUtils.parse(propNode.toString()));
iterator.remove();
} else if (propName.equals("format")) {
- builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
+ builder.format(propNode.toString());
iterator.remove();
} else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
@@ -196,13 +207,12 @@ public MappedFieldType clone() {
public boolean equals(Object o) {
if (!super.equals(o)) return false;
DateFieldType that = (DateFieldType) o;
- return Objects.equals(dateTimeFormatter.pattern(), that.dateTimeFormatter.pattern()) &&
- Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale());
+ return Objects.equals(dateTimeFormatter, that.dateTimeFormatter);
}
@Override
public int hashCode() {
- return Objects.hash(super.hashCode(), dateTimeFormatter.pattern(), dateTimeFormatter.locale());
+ return Objects.hash(super.hashCode(), dateTimeFormatter);
}
@Override
@@ -214,10 +224,10 @@ public String typeName() {
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
DateFieldType other = (DateFieldType) fieldType;
- if (Objects.equals(dateTimeFormatter().pattern(), other.dateTimeFormatter().pattern()) == false) {
+ if (Objects.equals(dateTimeFormatter.pattern(), other.dateTimeFormatter.pattern()) == false) {
conflicts.add("mapper [" + name() + "] has different [format] values");
}
- if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) {
+ if (Objects.equals(dateTimeFormatter.locale(), other.dateTimeFormatter.locale()) == false) {
conflicts.add("mapper [" + name() + "] has different [locale] values");
}
}
@@ -226,9 +236,9 @@ public DateFormatter dateTimeFormatter() {
return dateTimeFormatter;
}
- public void setDateTimeFormatter(DateFormatter dateTimeFormatter) {
+ void setDateTimeFormatter(DateFormatter formatter) {
checkIfFrozen();
- this.dateTimeFormatter = dateTimeFormatter;
+ this.dateTimeFormatter = formatter;
this.dateMathParser = dateTimeFormatter.toDateMathParser();
}
@@ -237,7 +247,7 @@ protected DateMathParser dateMathParser() {
}
long parse(String value) {
- return dateTimeFormatter().parseMillis(value);
+ return DateFormatters.toZonedDateTime(dateTimeFormatter().parse(value)).toInstant().toEpochMilli();
}
@Override
@@ -260,7 +270,7 @@ public Query termQuery(Object value, @Nullable QueryShardContext context) {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, ShapeRelation relation,
- @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
+ @Nullable ZoneId timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
failIfNotIndexed();
if (relation == ShapeRelation.DISJOINT) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() +
@@ -294,8 +304,8 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower
return query;
}
- public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTimeZone zone,
- @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
+ public long parseToMilliseconds(Object value, boolean roundUp,
+ @Nullable ZoneId zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
DateMathParser dateParser = dateMathParser();
if (forcedDateParser != null) {
dateParser = forcedDateParser;
@@ -307,13 +317,13 @@ public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTim
} else {
strValue = value.toString();
}
- return dateParser.parse(strValue, context::nowInMillis, roundUp, DateUtils.dateTimeZoneToZoneId(zone));
+ return dateParser.parse(strValue, context::nowInMillis, roundUp, zone).toEpochMilli();
}
@Override
- public Relation isFieldWithinQuery(IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper,
- DateTimeZone timeZone, DateMathParser dateParser,
- QueryRewriteContext context) throws IOException {
+ public Relation isFieldWithinQuery(IndexReader reader,
+ Object from, Object to, boolean includeLower, boolean includeUpper,
+ ZoneId timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
if (dateParser == null) {
dateParser = this.dateMathParser;
}
@@ -376,13 +386,13 @@ public Object valueForDisplay(Object value) {
}
@Override
- public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
DateFormatter dateTimeFormatter = this.dateTimeFormatter;
if (format != null) {
- dateTimeFormatter = DateFormatter.forPattern(format);
+ dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale());
}
if (timeZone == null) {
- timeZone = DateTimeZone.UTC;
+ timeZone = ZoneOffset.UTC;
}
return new DocValueFormat.DateTime(dateTimeFormatter, timeZone);
}
@@ -442,7 +452,7 @@ protected void parseCreateField(ParseContext context, List<IndexableField> field
long timestamp;
try {
timestamp = fieldType().parse(dateAsString);
- } catch (IllegalArgumentException e) {
+ } catch (IllegalArgumentException | ElasticsearchParseException e) {
if (ignoreMalformed.value()) {
context.addIgnoredField(fieldType.name());
return;
@@ -489,8 +499,9 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults,
|| fieldType().dateTimeFormatter().pattern().equals(DEFAULT_DATE_TIME_FORMATTER.pattern()) == false) {
builder.field("format", fieldType().dateTimeFormatter().pattern());
}
+
if (includeDefaults
- || fieldType().dateTimeFormatter().locale() != Locale.ROOT) {
+ || fieldType().dateTimeFormatter().locale().equals(DEFAULT_DATE_TIME_FORMATTER.locale()) == false) {
builder.field("locale", fieldType().dateTimeFormatter().locale());
}
}
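The hunk above replaces Joda's parseMillis with a java.time round trip: the formatter yields a TemporalAccessor, DateFormatters.toZonedDateTime turns it into a ZonedDateTime, and the epoch millis come from the Instant. A minimal standalone sketch of that shape using plain java.time (class name, pattern and sample date are illustrative, not taken from this PR):

import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class ParseToEpochMillis {
    public static void main(String[] args) {
        // parse with a java.time formatter, then convert to epoch millis,
        // mirroring DateFormatters.toZonedDateTime(parse(value)).toInstant().toEpochMilli()
        DateTimeFormatter formatter = DateTimeFormatter.ISO_OFFSET_DATE_TIME;
        ZonedDateTime date = ZonedDateTime.parse("2018-12-24T10:15:30+01:00", formatter);
        long epochMillis = date.toInstant().toEpochMilli();
        System.out.println(epochMillis);
    }
}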
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
index 54e59691f80d5..fe2bd6e9eed59 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
@@ -21,6 +21,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
@@ -35,6 +36,7 @@
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
import java.io.IOException;
+import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -672,7 +674,7 @@ private static Mapper.Builder<?,?> createBuilderFromFieldType(final ParseContext
private static Mapper.Builder<?, ?> newDateBuilder(String name, DateFormatter dateTimeFormatter, Version indexCreated) {
DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name);
if (dateTimeFormatter != null) {
- builder.dateTimeFormatter(dateTimeFormatter);
+ builder.format(dateTimeFormatter.pattern()).locale(dateTimeFormatter.locale());
}
return builder;
}
@@ -717,8 +719,8 @@ private static Mapper.Builder<?,?> createBuilderFromDynamicValue(final ParseCont
// `epoch_millis` or `YYYY`
for (DateFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
try {
- dateTimeFormatter.parseMillis(text);
- } catch (IllegalArgumentException e) {
+ dateTimeFormatter.parse(text);
+ } catch (ElasticsearchParseException | DateTimeParseException | IllegalArgumentException e) {
// failure to parse this, continue
continue;
}
@@ -728,8 +730,8 @@ private static Mapper.Builder,?> createBuilderFromDynamicValue(final ParseCont
}
if (builder instanceof DateFieldMapper.Builder) {
DateFieldMapper.Builder dateBuilder = (DateFieldMapper.Builder) builder;
- if (dateBuilder.isDateTimeFormatterSet() == false) {
- dateBuilder.dateTimeFormatter(dateTimeFormatter);
+ if (dateBuilder.isFormatterSet() == false) {
+ dateBuilder.format(dateTimeFormatter.pattern()).locale(dateTimeFormatter.locale());
}
}
return builder;
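Because dynamic date detection above now calls parse() instead of parseMillis(), the failure modes widen to ElasticsearchParseException and DateTimeParseException in addition to IllegalArgumentException. A rough standalone sketch of the detection loop with plain java.time formatters (the candidate patterns and sample text are illustrative, not the actual dynamic_date_formats defaults):

import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.List;

public class DynamicDateDetection {
    public static void main(String[] args) {
        List<DateTimeFormatter> candidates = List.of(
            DateTimeFormatter.ISO_LOCAL_DATE_TIME,
            DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm:ss"),
            DateTimeFormatter.ISO_LOCAL_DATE);

        String text = "2018/12/24 10:15:30";
        for (DateTimeFormatter candidate : candidates) {
            try {
                candidate.parse(text);
            } catch (DateTimeParseException e) {
                continue; // failure to parse with this pattern, try the next one
            }
            System.out.println("detected a date with: " + candidate);
            break;
        }
    }
}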
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java
index a8ef46b93060e..2b52e42ffe558 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java
@@ -44,10 +44,10 @@
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
+import java.time.ZoneId;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
@@ -303,7 +303,7 @@ public Object valueForDisplay(Object value) {
}
@Override
- public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
index f785e01125f69..5ef689709400d 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
@@ -50,9 +50,9 @@
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.List;
import java.util.Objects;
@@ -335,10 +335,10 @@ public Query termsQuery(List<?> values, @Nullable QueryShardContext context) {
* @param relation the relation, nulls should be interpreted like INTERSECTS
*/
public Query rangeQuery(
- Object lowerTerm, Object upperTerm,
- boolean includeLower, boolean includeUpper,
- ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser,
- QueryShardContext context) {
+ Object lowerTerm, Object upperTerm,
+ boolean includeLower, boolean includeUpper,
+ ShapeRelation relation, ZoneId timeZone, DateMathParser parser,
+ QueryShardContext context) {
throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries");
}
@@ -413,7 +413,7 @@ public Relation isFieldWithinQuery(
IndexReader reader,
Object from, Object to,
boolean includeLower, boolean includeUpper,
- DateTimeZone timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
+ ZoneId timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
return Relation.INTERSECTS;
}
@@ -448,7 +448,7 @@ public void setEagerGlobalOrdinals(boolean eagerGlobalOrdinals) {
/** Return a {@link DocValueFormat} that can be used to display and parse
* values as returned by the fielddata API.
* The default implementation returns a {@link DocValueFormat#RAW}. */
- public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
index 8d9a688776548..06e12ca8b5e4c 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
@@ -53,9 +53,9 @@
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
@@ -961,7 +961,7 @@ public Object valueForDisplay(Object value) {
}
@Override
- public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
+ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
+ "] does not support custom time zones");
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java
index d93c909ff8445..e5ba55de7bfd0 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java
@@ -42,6 +42,7 @@
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.network.InetAddresses;
@@ -49,19 +50,18 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
-import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.query.QueryShardContext;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
@@ -71,7 +71,6 @@
import java.util.Objects;
import java.util.Set;
-import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD;
@@ -92,12 +91,12 @@ public static class Defaults {
public static class Builder extends FieldMapper.Builder<Builder, RangeFieldMapper> {
private Boolean coerce;
- private Locale locale;
+ private Locale locale = Locale.ROOT;
+ private String pattern;
public Builder(String name, RangeType type) {
super(name, new RangeFieldType(type), new RangeFieldType(type));
builder = this;
- locale = Locale.ROOT;
}
@Override
@@ -128,8 +127,8 @@ protected Explicit<Boolean> coerce(BuilderContext context) {
return Defaults.COERCE;
}
- public Builder dateTimeFormatter(DateFormatter dateTimeFormatter) {
- fieldType().setDateTimeFormatter(dateTimeFormatter);
+ public Builder format(String format) {
+ this.pattern = format;
return this;
}
@@ -145,12 +144,15 @@ public void locale(Locale locale) {
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
- DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
+ DateFormatter formatter = fieldType().dateTimeFormatter;
if (fieldType().rangeType == RangeType.DATE) {
- if (!locale.equals(dateTimeFormatter.locale())) {
- fieldType().setDateTimeFormatter(dateTimeFormatter.withLocale(locale));
+ boolean hasPatternChanged = Strings.hasLength(builder.pattern) &&
+ Objects.equals(builder.pattern, formatter.pattern()) == false;
+
+ if (hasPatternChanged || Objects.equals(builder.locale, formatter.locale()) == false) {
+ fieldType().setDateTimeFormatter(DateFormatter.forPattern(pattern).withLocale(locale));
}
- } else if (dateTimeFormatter != null) {
+ } else if (pattern != null) {
throw new IllegalArgumentException("field [" + name() + "] of type [" + fieldType().rangeType
+ "] should not define a dateTimeFormatter unless it is a " + RangeType.DATE + " type");
}
@@ -190,7 +192,7 @@ public Mapper.Builder<?,?> parse(String name, Map<String, Object> node,
builder.locale(LocaleUtils.parse(propNode.toString()));
iterator.remove();
} else if (propName.equals("format")) {
- builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
+ builder.format(propNode.toString());
iterator.remove();
} else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
@@ -219,8 +221,8 @@ public static final class RangeFieldType extends MappedFieldType {
RangeFieldType(RangeFieldType other) {
super(other);
this.rangeType = other.rangeType;
- if (other.dateTimeFormatter() != null) {
- setDateTimeFormatter(other.dateTimeFormatter);
+ if (other.rangeType == RangeType.DATE && other.dateTimeFormatter() != null) {
+ setDateTimeFormatter(other.dateTimeFormatter());
}
}
@@ -235,15 +237,13 @@ public boolean equals(Object o) {
RangeFieldType that = (RangeFieldType) o;
return Objects.equals(rangeType, that.rangeType) &&
(rangeType == RangeType.DATE) ?
- Objects.equals(dateTimeFormatter.pattern(), that.dateTimeFormatter.pattern())
- && Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale())
+ Objects.equals(dateTimeFormatter, that.dateTimeFormatter)
: dateTimeFormatter == null && that.dateTimeFormatter == null;
}
@Override
public int hashCode() {
- return (dateTimeFormatter == null) ? Objects.hash(super.hashCode(), rangeType)
- : Objects.hash(super.hashCode(), rangeType, dateTimeFormatter.pattern(), dateTimeFormatter.locale());
+ return Objects.hash(super.hashCode(), rangeType, dateTimeFormatter);
}
@Override
@@ -285,7 +285,7 @@ public Query termQuery(Object value, QueryShardContext context) {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
- ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) {
+ ShapeRelation relation, ZoneId timeZone, DateMathParser parser, QueryShardContext context) {
failIfNotIndexed();
if (parser == null) {
parser = dateMathParser();
@@ -543,7 +543,8 @@ public Field getRangeField(String name, Range r) {
return new LongRange(name, new long[] {((Number)r.from).longValue()}, new long[] {((Number)r.to).longValue()});
}
private Number parse(DateMathParser dateMathParser, String dateStr) {
- return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");});
+ return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");})
+ .toEpochMilli();
}
@Override
public Number parseFrom(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included)
@@ -586,18 +587,18 @@ public Query dvRangeQuery(String field, QueryType queryType, Object from, Object
@Override
public Query rangeQuery(String field, boolean hasDocValues, Object lowerTerm, Object upperTerm, boolean includeLower,
- boolean includeUpper, ShapeRelation relation, @Nullable DateTimeZone timeZone,
+ boolean includeUpper, ShapeRelation relation, @Nullable ZoneId timeZone,
@Nullable DateMathParser parser, QueryShardContext context) {
- DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone;
- ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(zone);
+ ZoneId zone = (timeZone == null) ? ZoneOffset.UTC : timeZone;
+
DateMathParser dateMathParser = (parser == null) ?
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser() : parser;
Long low = lowerTerm == null ? Long.MIN_VALUE :
dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(),
- context::nowInMillis, false, zoneId);
+ context::nowInMillis, false, zone).toEpochMilli();
Long high = upperTerm == null ? Long.MAX_VALUE :
dateMathParser.parse(upperTerm instanceof BytesRef ? ((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(),
- context::nowInMillis, false, zoneId);
+ context::nowInMillis, false, zone).toEpochMilli();
return super.rangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation, zone,
dateMathParser, context);
@@ -910,7 +911,7 @@ public Object parse(Object value, boolean coerce) {
return numberType.parse(value, coerce);
}
public Query rangeQuery(String field, boolean hasDocValues, Object from, Object to, boolean includeFrom, boolean includeTo,
- ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser dateMathParser,
+ ShapeRelation relation, @Nullable ZoneId timeZone, @Nullable DateMathParser dateMathParser,
QueryShardContext context) {
Object lower = from == null ? minValue() : parse(from, false);
Object upper = to == null ? maxValue() : parse(to, false);
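DateMathParser.parse now returns a java.time.Instant, which is why the call sites above gained a trailing toEpochMilli(). A small sketch of the new call-site shape; it assumes the Elasticsearch classes touched in this diff are on the classpath, and the class name, date-math expressions and zone are illustrative rather than code from the PR:

import java.time.ZoneOffset;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.index.mapper.DateFieldMapper;

public class DateMathToMillis {
    public static void main(String[] args) {
        DateMathParser parser = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();
        long nowInMillis = System.currentTimeMillis();
        // parse(value, now, roundUp, zone) returns an Instant; callers convert explicitly
        long lower = parser.parse("now-1d/d", () -> nowInMillis, false, ZoneOffset.UTC).toEpochMilli();
        long upper = parser.parse("now/d", () -> nowInMillis, true, ZoneOffset.UTC).toEpochMilli();
        System.out.println(lower + " .. " + upper);
    }
}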
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java
index f35e6126dbe71..6d2f0fddd86c2 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java
@@ -22,7 +22,6 @@
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -46,7 +45,7 @@ public static class Defaults {
public static final DateFormatter[] DYNAMIC_DATE_TIME_FORMATTERS =
new DateFormatter[]{
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
- Joda.getStrictStandardDateFormatter()
+ DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis")
};
public static final boolean DATE_DETECTION = true;
public static final boolean NUMERIC_DETECTION = false;
@@ -55,8 +54,7 @@ public static class Defaults {
public static class Builder extends ObjectMapper.Builder<Builder, RootObjectMapper> {
protected Explicit<DynamicTemplate[]> dynamicTemplates = new Explicit<>(new DynamicTemplate[0], false);
- protected Explicit<DateFormatter[]> dynamicDateTimeFormatters =
- new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
+ protected Explicit<DateFormatter[]> dynamicDateTimeFormatters = new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
protected Explicit<Boolean> dateDetection = new Explicit<>(Defaults.DATE_DETECTION, false);
protected Explicit<Boolean> numericDetection = new Explicit<>(Defaults.NUMERIC_DETECTION, false);
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java
index 3d3b160787050..366eb3b36f0fe 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java
@@ -23,7 +23,8 @@
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.index.query.QueryShardContext;
-import org.joda.time.DateTimeZone;
+
+import java.time.ZoneId;
/**
* {@link MappedFieldType} base impl for field types that are neither dates nor ranges.
@@ -40,7 +41,7 @@ protected SimpleMappedFieldType(MappedFieldType ref) {
@Override
public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
- ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) {
+ ShapeRelation relation, ZoneId timeZone, DateMathParser parser, QueryShardContext context) {
if (relation == ShapeRelation.DISJOINT) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() +
"] does not support DISJOINT ranges");
@@ -52,7 +53,7 @@ public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includ
}
/**
- * Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, DateTimeZone, DateMathParser, QueryShardContext)}
+ * Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, ZoneId, DateMathParser, QueryShardContext)}
* but without the trouble of relations or date-specific options.
*/
protected Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
index d93caf3c4e8d1..8cf66009ea140 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
@@ -43,7 +43,6 @@ public class TypeParsers {
public static final String INDEX_OPTIONS_POSITIONS = "positions";
public static final String INDEX_OPTIONS_OFFSETS = "offsets";
-
private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode,
Mapper.TypeParser.ParserContext parserContext) {
NamedAnalyzer indexAnalyzer = null;
diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
index 6ae9055efcefc..363384030a2ac 100644
--- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
+++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
@@ -38,9 +38,9 @@
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.search.QueryParserHelper;
import org.elasticsearch.index.search.QueryStringQueryParser;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
@@ -144,7 +144,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQueryBuilder> i
diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java
--- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java
+++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java
private static final ParseField RELATION_FIELD = new ParseField("relation");
private final String fieldName;
-
private Object from;
-
private Object to;
-
- private DateTimeZone timeZone;
-
+ private ZoneId timeZone;
private boolean includeLower = DEFAULT_INCLUDE_LOWER;
-
private boolean includeUpper = DEFAULT_INCLUDE_UPPER;
-
- private DateFormatter format;
-
+ private String format;
private ShapeRelation relation;
/**
@@ -101,11 +95,8 @@ public RangeQueryBuilder(StreamInput in) throws IOException {
to = in.readGenericValue();
includeLower = in.readBoolean();
includeUpper = in.readBoolean();
- timeZone = in.readOptionalTimeZone();
- String formatString = in.readOptionalString();
- if (formatString != null) {
- format = DateFormatter.forPattern(formatString);
- }
+ timeZone = in.readOptionalZoneId();
+ format = in.readOptionalString();
String relationString = in.readOptionalString();
if (relationString != null) {
relation = ShapeRelation.getRelationByName(relationString);
@@ -129,12 +120,8 @@ protected void doWriteTo(StreamOutput out) throws IOException {
out.writeGenericValue(this.to);
out.writeBoolean(this.includeLower);
out.writeBoolean(this.includeUpper);
- out.writeOptionalTimeZone(timeZone);
- String formatString = null;
- if (this.format != null) {
- formatString = this.format.pattern();
- }
- out.writeOptionalString(formatString);
+ out.writeOptionalZoneId(timeZone);
+ out.writeOptionalString(format);
String relationString = null;
if (this.relation != null) {
relationString = this.relation.getRelationName();
@@ -267,7 +254,11 @@ public RangeQueryBuilder timeZone(String timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("timezone cannot be null");
}
- this.timeZone = DateTimeZone.forID(timeZone);
+ try {
+ this.timeZone = ZoneId.of(timeZone);
+ } catch (DateTimeException e) {
+ throw new IllegalArgumentException(e);
+ }
return this;
}
@@ -275,10 +266,10 @@ public RangeQueryBuilder timeZone(String timeZone) {
* In case of date field, gets the from/to fields timezone adjustment
*/
public String timeZone() {
- return this.timeZone == null ? null : this.timeZone.getID();
+ return this.timeZone == null ? null : this.timeZone.getId();
}
- DateTimeZone getDateTimeZone() { // for testing
+ ZoneId getDateTimeZone() { // for testing
return timeZone;
}
@@ -289,7 +280,9 @@ public RangeQueryBuilder format(String format) {
if (format == null) {
throw new IllegalArgumentException("format cannot be null");
}
- this.format = DateFormatter.forPattern(format);
+ // this just ensures that the pattern is actually valid, no need to keep it here
+ DateFormatter.forPattern(format);
+ this.format = format;
return this;
}
@@ -297,12 +290,12 @@ public RangeQueryBuilder format(String format) {
* Gets the format field to parse the from/to fields
*/
public String format() {
- return this.format == null ? null : this.format.pattern();
+ return format;
}
DateMathParser getForceDateParser() { // pkg private for testing
- if (this.format != null) {
- return this.format.toDateMathParser();
+ if (Strings.hasText(format)) {
+ return DateFormatter.forPattern(this.format).toDateMathParser();
}
return null;
}
@@ -334,10 +327,10 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
builder.field(INCLUDE_LOWER_FIELD.getPreferredName(), includeLower);
builder.field(INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper);
if (timeZone != null) {
- builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getID());
+ builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getId());
}
- if (format != null) {
- builder.field(FORMAT_FIELD.getPreferredName(), format.pattern());
+ if (Strings.hasText(format)) {
+ builder.field(FORMAT_FIELD.getPreferredName(), format);
}
if (relation != null) {
builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName());
@@ -531,21 +524,17 @@ protected Query doToQuery(QueryShardContext context) throws IOException {
@Override
protected int doHashCode() {
- String timeZoneId = timeZone == null ? null : timeZone.getID();
- String formatString = format == null ? null : format.pattern();
- return Objects.hash(fieldName, from, to, timeZoneId, includeLower, includeUpper, formatString);
+ return Objects.hash(fieldName, from, to, timeZone, includeLower, includeUpper, format);
}
@Override
protected boolean doEquals(RangeQueryBuilder other) {
- String timeZoneId = timeZone == null ? null : timeZone.getID();
- String formatString = format == null ? null : format.pattern();
return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(from, other.from) &&
Objects.equals(to, other.to) &&
- Objects.equals(timeZoneId, other.timeZone()) &&
+ Objects.equals(timeZone, other.timeZone) &&
Objects.equals(includeLower, other.includeLower) &&
Objects.equals(includeUpper, other.includeUpper) &&
- Objects.equals(formatString, other.format());
+ Objects.equals(format, other.format);
}
}
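RangeQueryBuilder#timeZone above keeps rejecting bad zone ids with IllegalArgumentException, as Joda's DateTimeZone.forID did, by catching the DateTimeException that ZoneId.of throws. A standalone illustration of that validation pattern (class and method names are illustrative):

import java.time.DateTimeException;
import java.time.ZoneId;

public class ZoneIdValidation {
    static ZoneId parseZone(String id) {
        try {
            return ZoneId.of(id);
        } catch (DateTimeException e) {
            // rewrap so callers keep seeing IllegalArgumentException
            throw new IllegalArgumentException(e);
        }
    }

    public static void main(String[] args) {
        System.out.println(parseZone("Europe/Paris")); // region id
        System.out.println(parseZone("+01:00"));       // fixed offsets are valid ZoneIds too
        try {
            parseZone("NotAZone");
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getCause().getMessage());
        }
    }
}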
diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java
index 4974ef9277e9a..dc5354c7e0522 100644
--- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java
+++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java
@@ -54,9 +54,9 @@
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -89,7 +89,7 @@ public class QueryStringQueryParser extends XQueryParser {
private Analyzer forceQuoteAnalyzer;
private String quoteFieldSuffix;
private boolean analyzeWildcard;
- private DateTimeZone timeZone;
+ private ZoneId timeZone;
private Fuzziness fuzziness = Fuzziness.AUTO;
private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions;
private MappedFieldType currentFieldType;
@@ -227,7 +227,7 @@ public void setAnalyzeWildcard(boolean analyzeWildcard) {
/**
* @param timeZone Time Zone to be applied to any range query related to dates.
*/
- public void setTimeZone(DateTimeZone timeZone) {
+ public void setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
}
diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
index 719558edbf748..90ebc8e074108 100644
--- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
+++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
@@ -19,9 +19,6 @@
package org.elasticsearch.ingest;
-import java.util.Collections;
-import java.util.IdentityHashMap;
-import java.util.Set;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.IdFieldMapper;
@@ -37,12 +34,15 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
+import java.util.Collections;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
+import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Set;
/**
* Represents a single document being captured before indexing and holds the source and metadata (like id, type and index).
diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java
index 5b55c00875d47..7bf26fd5e57a4 100644
--- a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java
+++ b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java
@@ -22,7 +22,6 @@
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import java.lang.management.ManagementFactory;
@@ -43,7 +42,7 @@ public class HotThreads {
private static final Object mutex = new Object();
- private static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime");
+ private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("dateOptionalTime");
private int busiestThreads = 3;
private TimeValue interval = new TimeValue(500, TimeUnit.MILLISECONDS);
diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
index 676f2bbdc7b2e..bb449d584b2c8 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
@@ -39,7 +39,7 @@
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.time.DateFormatters;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.rest.RestController;
@@ -61,6 +61,7 @@
public class RestIndicesAction extends AbstractCatAction {
+ private static final DateFormatter STRICT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_time");
private final IndexNameExpressionResolver indexNameExpressionResolver;
public RestIndicesAction(Settings settings, RestController controller, IndexNameExpressionResolver indexNameExpressionResolver) {
@@ -432,7 +433,7 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res
table.addCell(indexMetaData.getCreationDate());
ZonedDateTime creationTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC);
- table.addCell(DateFormatters.forPattern("strict_date_time").format(creationTime));
+ table.addCell(STRICT_DATE_TIME_FORMATTER.format(creationTime));
table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size());
table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size());
diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java
index fb302b1b3b3a4..22258ce2d8878 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java
@@ -26,7 +26,6 @@
import org.elasticsearch.common.Table;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
@@ -99,7 +98,7 @@ protected Table getTableWithHeader(RestRequest request) {
.endHeaders();
}
- private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
+ private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
private Table buildTable(RestRequest req, GetSnapshotsResponse getSnapshotsResponse) {
Table table = getTableWithHeader(req);
diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java
index 39b3f08dcdc5f..573eac6c04941 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java
@@ -28,7 +28,6 @@
import org.elasticsearch.common.Table;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
@@ -125,7 +124,7 @@ protected Table getTableWithHeader(final RestRequest request) {
return table;
}
- private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
+ private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
private void buildRow(Table table, boolean fullId, boolean detailed, DiscoveryNodes discoveryNodes, TaskInfo taskInfo) {
table.startRow();
diff --git a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java
index 546deb3a24b68..fc3816cad8a15 100644
--- a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java
+++ b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java
@@ -23,7 +23,6 @@
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateUtils;
import org.joda.time.DateTime;
@@ -50,7 +49,7 @@
* A wrapper around ZonedDateTime that exposes joda methods for backcompat.
*/
public class JodaCompatibleZonedDateTime {
- private static final DateFormatter DATE_FORMATTER = DateFormatters.forPattern("strict_date_time");
+ private static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("strict_date_time");
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(JodaCompatibleZonedDateTime.class));
diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java
index a911a4f197d67..753ef1fb23d85 100644
--- a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java
+++ b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java
@@ -212,7 +212,7 @@ public static final class DecayDateLinear {
double scaling;
public DecayDateLinear(String originStr, String scaleStr, String offsetStr, double decay) {
- this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
+ this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")
@@ -235,7 +235,7 @@ public static final class DecayDateExp {
double scaling;
public DecayDateExp(String originStr, String scaleStr, String offsetStr, double decay) {
- this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
+ this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")
@@ -258,7 +258,7 @@ public static final class DecayDateGauss {
double scaling;
public DecayDateGauss(String originStr, String scaleStr, String offsetStr, double decay) {
- this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
+ this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")
diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java
index 900e1d7fd09ca..ceefe035d4613 100644
--- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java
+++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java
@@ -21,6 +21,7 @@
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -30,7 +31,6 @@
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
@@ -38,6 +38,7 @@
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
+import java.time.Instant;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Base64;
@@ -164,20 +165,24 @@ final class DateTime implements DocValueFormat {
public static final String NAME = "date_time";
final DateFormatter formatter;
- // TODO: change this to ZoneId, but will require careful change to serialization
- final DateTimeZone timeZone;
- private final ZoneId zoneId;
+ final ZoneId timeZone;
private final DateMathParser parser;
- public DateTime(DateFormatter formatter, DateTimeZone timeZone) {
- this.formatter = Objects.requireNonNull(formatter);
+ public DateTime(DateFormatter formatter, ZoneId timeZone) {
+ this.formatter = formatter;
this.timeZone = Objects.requireNonNull(timeZone);
- this.zoneId = DateUtils.dateTimeZoneToZoneId(timeZone);
this.parser = formatter.toDateMathParser();
}
public DateTime(StreamInput in) throws IOException {
- this(DateFormatter.forPattern(in.readString()), DateTimeZone.forID(in.readString()));
+ this.formatter = DateFormatter.forPattern(in.readString());
+ this.parser = formatter.toDateMathParser();
+ String zoneId = in.readString();
+ if (in.getVersion().before(Version.V_7_0_0)) {
+ this.timeZone = DateUtils.of(zoneId);
+ } else {
+ this.timeZone = ZoneId.of(zoneId);
+ }
}
@Override
@@ -188,12 +193,16 @@ public String getWriteableName() {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(formatter.pattern());
- out.writeString(timeZone.getID());
+ if (out.getVersion().before(Version.V_7_0_0)) {
+ out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID());
+ } else {
+ out.writeString(timeZone.getId());
+ }
}
@Override
public String format(long value) {
- return formatter.withZone(zoneId).formatMillis(value);
+ return formatter.format(Instant.ofEpochMilli(value).atZone(timeZone));
}
@Override
@@ -203,7 +212,7 @@ public String format(double value) {
@Override
public long parseLong(String value, boolean roundUp, LongSupplier now) {
- return parser.parse(value, now, roundUp, DateUtils.dateTimeZoneToZoneId(timeZone));
+ return parser.parse(value, now, roundUp, timeZone).toEpochMilli();
}
@Override
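The format path above no longer relies on formatMillis with an implicit zone; the epoch millis are converted to a ZonedDateTime in the target zone and handed to the formatter. A plain java.time sketch of the same shape (the value, zone and formatter choice are illustrative):

import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

public class FormatEpochMillis {
    public static void main(String[] args) {
        long value = 1545642930000L;                 // epoch millis, as stored in doc values
        ZoneId timeZone = ZoneId.of("Europe/Paris");
        DateTimeFormatter formatter = DateTimeFormatter.ISO_OFFSET_DATE_TIME;
        // mirrors formatter.format(Instant.ofEpochMilli(value).atZone(timeZone)) above
        System.out.println(formatter.format(Instant.ofEpochMilli(value).atZone(timeZone)));
    }
}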
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
index 28970ec828af9..53a7832884c76 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java
@@ -19,11 +19,12 @@
package org.elasticsearch.search.aggregations.bucket.composite;
+import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
+import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -37,9 +38,10 @@
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.Objects;
import static org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder.DATE_FIELD_UNITS;
@@ -70,9 +72,9 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild
}, Histogram.INTERVAL_FIELD, ObjectParser.ValueType.LONG);
PARSER.declareField(DateHistogramValuesSourceBuilder::timeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
- return DateTimeZone.forID(p.text());
+ return ZoneId.of(p.text());
} else {
- return DateTimeZone.forOffsetHours(p.intValue());
+ return ZoneOffset.ofHours(p.intValue());
}
}, new ParseField("time_zone"), ObjectParser.ValueType.LONG);
CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER, ValueType.NUMERIC);
@@ -82,7 +84,7 @@ static DateHistogramValuesSourceBuilder parse(String name, XContentParser parser
}
private long interval = 0;
- private DateTimeZone timeZone = null;
+ private ZoneId timeZone = null;
private DateHistogramInterval dateHistogramInterval;
public DateHistogramValuesSourceBuilder(String name) {
@@ -93,8 +95,10 @@ protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException {
super(in);
this.interval = in.readLong();
this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
- if (in.readBoolean()) {
- timeZone = DateTimeZone.forID(in.readString());
+ if (in.getVersion().before(Version.V_7_0_0)) {
+ this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone());
+ } else {
+ this.timeZone = in.readOptionalZoneId();
}
}
@@ -102,10 +106,10 @@ protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException {
protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeLong(interval);
out.writeOptionalWriteable(dateHistogramInterval);
- boolean hasTimeZone = timeZone != null;
- out.writeBoolean(hasTimeZone);
- if (hasTimeZone) {
- out.writeString(timeZone.getID());
+ if (out.getVersion().before(Version.V_7_0_0)) {
+ out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone));
+ } else {
+ out.writeOptionalZoneId(timeZone);
}
}
@@ -176,7 +180,7 @@ public DateHistogramValuesSourceBuilder dateHistogramInterval(DateHistogramInter
/**
* Sets the time zone to use for this aggregation
*/
- public DateHistogramValuesSourceBuilder timeZone(DateTimeZone timeZone) {
+ public DateHistogramValuesSourceBuilder timeZone(ZoneId timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("[timeZone] must not be null: [" + name + "]");
}
@@ -187,14 +191,14 @@ public DateHistogramValuesSourceBuilder timeZone(DateTimeZone timeZone) {
/**
* Gets the time zone to use for this aggregation
*/
- public DateTimeZone timeZone() {
+ public ZoneId timeZone() {
return timeZone;
}
private Rounding createRounding() {
Rounding.Builder tzRoundingBuilder;
if (dateHistogramInterval != null) {
- DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
+ Rounding.DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
if (dateTimeUnit != null) {
tzRoundingBuilder = Rounding.builder(dateTimeUnit);
} else {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java
index 635690c44f49e..9ee142fcd2fd5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java
@@ -21,7 +21,7 @@
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
-import org.elasticsearch.common.rounding.Rounding;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.aggregations.support.ValuesSource;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
index 87ba80af9a4b0..794ce066ed76e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
@@ -20,11 +20,10 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -42,9 +41,9 @@
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
@@ -70,19 +69,19 @@ public class AutoDateHistogramAggregationBuilder
* The current implementation probably should not be invoked in a tight loop.
* @return Array of RoundingInfo
*/
- static RoundingInfo[] buildRoundings(DateTimeZone timeZone) {
+ static RoundingInfo[] buildRoundings(ZoneId timeZone) {
RoundingInfo[] roundings = new RoundingInfo[6];
- roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
- 1000L, "s" , 1, 5, 10, 30);
- roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
+ roundings[0] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone),
+ 1000L, "s", 1, 5, 10, 30);
+ roundings[1] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone),
60 * 1000L, "m", 1, 5, 10, 30);
- roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
- 60 * 60 * 1000L, "h", 1, 3, 12);
- roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone),
+ roundings[2] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone),
+ 60 * 60 * 1000L, "h",1, 3, 12);
+ roundings[3] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.DAY_OF_MONTH, timeZone),
24 * 60 * 60 * 1000L, "d", 1, 7);
- roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone),
+ roundings[4] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MONTH_OF_YEAR, timeZone),
30 * 24 * 60 * 60 * 1000L, "M", 1, 3);
- roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone),
+ roundings[5] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.YEAR_OF_CENTURY, timeZone),
365 * 24 * 60 * 60 * 1000L, "y", 1, 5, 10, 20, 50, 100);
return roundings;
}
@@ -156,7 +155,7 @@ public int getNumBuckets() {
return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData);
}
- static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) {
+ static Rounding createRounding(Rounding.DateTimeUnit interval, ZoneId timeZone) {
Rounding.Builder tzRoundingBuilder = Rounding.builder(interval);
if (timeZone != null) {
tzRoundingBuilder.timeZone(timeZone);
@@ -196,7 +195,7 @@ public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, String
}
public RoundingInfo(StreamInput in) throws IOException {
- rounding = Rounding.Streams.read(in);
+ rounding = Rounding.read(in);
roughEstimateDurationMillis = in.readVLong();
innerIntervals = in.readIntArray();
unitAbbreviation = in.readString();
@@ -204,7 +203,7 @@ public RoundingInfo(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
- Rounding.Streams.write(rounding, out);
+ rounding.writeTo(out);
out.writeVLong(roughEstimateDurationMillis);
out.writeIntArray(innerIntervals);
out.writeString(unitAbbreviation);
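The rounding infrastructure now comes from org.elasticsearch.common.Rounding, with its own DateTimeUnit and java.time zones. A short sketch of building a zone-aware rounding, using only calls that appear in this diff (builder, timeZone, build, nextRoundingValue); it assumes the Elasticsearch server classes are on the classpath, and the class name, unit, zone and sample millis are illustrative:

import java.time.ZoneId;
import org.elasticsearch.common.Rounding;

public class BuildRounding {
    public static void main(String[] args) {
        // calendar-unit rounding in a specific time zone, as createRounding builds it above
        Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH)
            .timeZone(ZoneId.of("Europe/Paris"))
            .build();
        long someMillis = 1545642930000L;
        System.out.println(rounding.nextRoundingValue(someMillis));
    }
}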
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
index 81bb70bd9672a..1b982ea9deca2 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
@@ -23,8 +23,8 @@
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
index 5199313e0aca1..6d7852a864453 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java
@@ -23,10 +23,9 @@
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.unit.TimeValue;
@@ -54,10 +53,12 @@
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeField;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.zone.ZoneOffsetTransition;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -70,29 +71,30 @@
*/
public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder
implements MultiBucketAggregationBuilder {
+
public static final String NAME = "date_histogram";
private static DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser();
- public static final Map<String, DateTimeUnit> DATE_FIELD_UNITS;
+ public static final Map<String, Rounding.DateTimeUnit> DATE_FIELD_UNITS;
static {
- Map<String, DateTimeUnit> dateFieldUnits = new HashMap<>();
- dateFieldUnits.put("year", DateTimeUnit.YEAR_OF_CENTURY);
- dateFieldUnits.put("1y", DateTimeUnit.YEAR_OF_CENTURY);
- dateFieldUnits.put("quarter", DateTimeUnit.QUARTER);
- dateFieldUnits.put("1q", DateTimeUnit.QUARTER);
- dateFieldUnits.put("month", DateTimeUnit.MONTH_OF_YEAR);
- dateFieldUnits.put("1M", DateTimeUnit.MONTH_OF_YEAR);
- dateFieldUnits.put("week", DateTimeUnit.WEEK_OF_WEEKYEAR);
- dateFieldUnits.put("1w", DateTimeUnit.WEEK_OF_WEEKYEAR);
- dateFieldUnits.put("day", DateTimeUnit.DAY_OF_MONTH);
- dateFieldUnits.put("1d", DateTimeUnit.DAY_OF_MONTH);
- dateFieldUnits.put("hour", DateTimeUnit.HOUR_OF_DAY);
- dateFieldUnits.put("1h", DateTimeUnit.HOUR_OF_DAY);
- dateFieldUnits.put("minute", DateTimeUnit.MINUTES_OF_HOUR);
- dateFieldUnits.put("1m", DateTimeUnit.MINUTES_OF_HOUR);
- dateFieldUnits.put("second", DateTimeUnit.SECOND_OF_MINUTE);
- dateFieldUnits.put("1s", DateTimeUnit.SECOND_OF_MINUTE);
+ Map<String, Rounding.DateTimeUnit> dateFieldUnits = new HashMap<>();
+ dateFieldUnits.put("year", Rounding.DateTimeUnit.YEAR_OF_CENTURY);
+ dateFieldUnits.put("1y", Rounding.DateTimeUnit.YEAR_OF_CENTURY);
+ dateFieldUnits.put("quarter", Rounding.DateTimeUnit.QUARTER_OF_YEAR);
+ dateFieldUnits.put("1q", Rounding.DateTimeUnit.QUARTER_OF_YEAR);
+ dateFieldUnits.put("month", Rounding.DateTimeUnit.MONTH_OF_YEAR);
+ dateFieldUnits.put("1M", Rounding.DateTimeUnit.MONTH_OF_YEAR);
+ dateFieldUnits.put("week", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR);
+ dateFieldUnits.put("1w", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR);
+ dateFieldUnits.put("day", Rounding.DateTimeUnit.DAY_OF_MONTH);
+ dateFieldUnits.put("1d", Rounding.DateTimeUnit.DAY_OF_MONTH);
+ dateFieldUnits.put("hour", Rounding.DateTimeUnit.HOUR_OF_DAY);
+ dateFieldUnits.put("1h", Rounding.DateTimeUnit.HOUR_OF_DAY);
+ dateFieldUnits.put("minute", Rounding.DateTimeUnit.MINUTES_OF_HOUR);
+ dateFieldUnits.put("1m", Rounding.DateTimeUnit.MINUTES_OF_HOUR);
+ dateFieldUnits.put("second", Rounding.DateTimeUnit.SECOND_OF_MINUTE);
+ dateFieldUnits.put("1s", Rounding.DateTimeUnit.SECOND_OF_MINUTE);
DATE_FIELD_UNITS = unmodifiableMap(dateFieldUnits);
}
@@ -369,11 +371,11 @@ public String getType() {
* coordinating node in order to generate missing buckets, which may cross a transition
* even though data on the shards doesn't.
*/
- DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
- final DateTimeZone tz = timeZone();
+ ZoneId rewriteTimeZone(QueryShardContext context) throws IOException {
+ final ZoneId tz = timeZone();
if (field() != null &&
tz != null &&
- tz.isFixed() == false &&
+ tz.getRules().isFixedOffset() == false &&
field() != null &&
script() == null) {
final MappedFieldType ft = context.fieldMapper(field());
@@ -391,16 +393,29 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
}
if (anyInstant != null) {
- final long prevTransition = tz.previousTransition(anyInstant);
- final long nextTransition = tz.nextTransition(anyInstant);
+ Instant instant = Instant.ofEpochMilli(anyInstant);
+ ZoneOffsetTransition prevOffsetTransition = tz.getRules().previousTransition(instant);
+ final long prevTransition;
+ if (prevOffsetTransition != null) {
+ prevTransition = prevOffsetTransition.getInstant().toEpochMilli();
+ } else {
+ prevTransition = instant.toEpochMilli();
+ }
+ ZoneOffsetTransition nextOffsetTransition = tz.getRules().nextTransition(instant);
+ final long nextTransition;
+ if (nextOffsetTransition != null) {
+ nextTransition = nextOffsetTransition.getInstant().toEpochMilli();
+ } else {
+ nextTransition = instant.toEpochMilli();
+ }
// We need all not only values but also rounded values to be within
// [prevTransition, nextTransition].
final long low;
- DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
+ Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
if (intervalAsUnit != null) {
- final DateTimeField dateTimeField = intervalAsUnit.field(tz);
- low = dateTimeField.roundCeiling(prevTransition);
+ Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build();
+ low = rounding.nextRoundingValue(prevTransition);
} else {
final TimeValue intervalAsMillis = getIntervalAsTimeValue();
low = Math.addExact(prevTransition, intervalAsMillis.millis());
@@ -408,12 +423,12 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
// rounding rounds down, so 'nextTransition' is a good upper bound
final long high = nextTransition;
- if (ft.isFieldWithinQuery(reader, low, high, true, false, DateTimeZone.UTC, EPOCH_MILLIS_PARSER,
+ if (ft.isFieldWithinQuery(reader, low, high, true, false, ZoneOffset.UTC, EPOCH_MILLIS_PARSER,
context) == Relation.WITHIN) {
// All values in this reader have the same offset despite daylight saving times.
// This is very common for location-based timezones such as Europe/Paris in
// combination with time-based indices.
- return DateTimeZone.forOffsetMillis(tz.getOffset(anyInstant));
+ return ZoneOffset.ofTotalSeconds(tz.getRules().getOffset(instant).getTotalSeconds());
}
}
}
@@ -424,9 +439,9 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
@Override
protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config,
AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
- final DateTimeZone tz = timeZone();
+ final ZoneId tz = timeZone();
final Rounding rounding = createRounding(tz);
- final DateTimeZone rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext());
+ final ZoneId rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext());
final Rounding shardRounding;
if (tz == rewrittenTimeZone) {
shardRounding = rounding;
@@ -447,7 +462,7 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
* {@code null} then it means that the interval is expressed as a fixed
* {@link TimeValue} and may be accessed via
* {@link #getIntervalAsTimeValue()}. */
- private DateTimeUnit getIntervalAsDateTimeUnit() {
+ private Rounding.DateTimeUnit getIntervalAsDateTimeUnit() {
if (dateHistogramInterval != null) {
return DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
}
@@ -466,9 +481,9 @@ private TimeValue getIntervalAsTimeValue() {
}
}
- private Rounding createRounding(DateTimeZone timeZone) {
+ private Rounding createRounding(ZoneId timeZone) {
Rounding.Builder tzRoundingBuilder;
- DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
+ Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
if (intervalAsUnit != null) {
tzRoundingBuilder = Rounding.builder(intervalAsUnit);
} else {
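For reference, the rewriteTimeZone() hunks above swap Joda's long-returning previousTransition/nextTransition for java.time's ZoneRules, whose ZoneOffsetTransition results can be null (for example on fixed-offset zones), hence the added null checks. A minimal JDK-only sketch of the same lookups, with an illustrative zone and instant that are not taken from the patch:

    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.time.zone.ZoneOffsetTransition;
    import java.time.zone.ZoneRules;

    public class ZoneTransitionSketch {
        public static void main(String[] args) {
            ZoneId tz = ZoneId.of("Europe/Paris");              // a DST-observing zone
            ZoneRules rules = tz.getRules();
            Instant anyInstant = Instant.parse("2018-07-15T12:00:00Z");

            // previousTransition()/nextTransition() may return null, which is why the
            // patch falls back to the instant itself in that case
            ZoneOffsetTransition prev = rules.previousTransition(anyInstant);
            ZoneOffsetTransition next = rules.nextTransition(anyInstant);
            long prevTransition = prev != null ? prev.getInstant().toEpochMilli() : anyInstant.toEpochMilli();
            long nextTransition = next != null ? next.getInstant().toEpochMilli() : anyInstant.toEpochMilli();

            // if every (rounded) value falls inside [prevTransition, nextTransition],
            // the zone can be rewritten to the fixed offset valid at that instant
            ZoneOffset fixed = ZoneOffset.ofTotalSeconds(rules.getOffset(anyInstant).getTotalSeconds());

            System.out.println(Instant.ofEpochMilli(prevTransition)); // 2018-03-25T01:00:00Z
            System.out.println(Instant.ofEpochMilli(nextTransition)); // 2018-10-28T01:00:00Z
            System.out.println(fixed);                                // +02:00
        }
    }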
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
index 735a6717210a5..0c7a91505ae88 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java
@@ -23,8 +23,8 @@
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
index c7ad6de7e0d72..8c025eb34eeb3 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java
@@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
-import org.elasticsearch.common.rounding.Rounding;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
index 4cecfeff83381..b0dfbb9d66e9d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
@@ -21,10 +21,10 @@
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentFragment;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
index f2e450942c3ad..63d08f5e832ac 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
@@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregations;
@@ -32,10 +32,10 @@
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -108,7 +108,7 @@ public String getKeyAsString() {
@Override
public Object getKey() {
- return new DateTime(key, DateTimeZone.UTC);
+ return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
@Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
index 496f8efc60ccf..2fa7f15a703ec 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
@@ -20,9 +20,9 @@
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregations;
@@ -34,10 +34,10 @@
import org.elasticsearch.search.aggregations.KeyComparable;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@@ -112,7 +112,7 @@ public String getKeyAsString() {
@Override
public Object getKey() {
- return new DateTime(key, DateTimeZone.UTC);
+ return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
@Override
@@ -185,13 +185,13 @@ static class EmptyBucketInfo {
}
EmptyBucketInfo(StreamInput in) throws IOException {
- rounding = Rounding.Streams.read(in);
+ rounding = Rounding.read(in);
subAggregations = InternalAggregations.readAggregations(in);
bounds = in.readOptionalWriteable(ExtendedBounds::new);
}
void writeTo(StreamOutput out) throws IOException {
- Rounding.Streams.write(rounding, out);
+ rounding.writeTo(out);
subAggregations.writeTo(out);
out.writeOptionalWriteable(bounds);
}
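The bucket getKey() implementations above now build a UTC ZonedDateTime instead of a Joda DateTime. A small sketch of the replacement expression and its round trip back to epoch millis; the key value is made up for illustration:

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    public class BucketKeySketch {
        public static void main(String[] args) {
            long key = 1546300800000L; // 2019-01-01T00:00:00Z, an illustrative bucket key

            // replacement for Joda's `new DateTime(key, DateTimeZone.UTC)`
            ZonedDateTime bucketKey = Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);

            // the round trip preserves the epoch millis exactly
            System.out.println(bucketKey);                                   // 2019-01-01T00:00Z
            System.out.println(bucketKey.toInstant().toEpochMilli() == key); // true
        }
    }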
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
index c9ff1389f8ad3..66a29b4e05073 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
@@ -24,10 +24,10 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.List;
public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation implements Histogram {
@@ -83,7 +83,7 @@ public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBuck
@Override
public Object getKey() {
if (key != null) {
- return new DateTime(key, DateTimeZone.UTC);
+ return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
return null;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
index ace0cb59907a8..1cf43a53ed26c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
@@ -23,10 +23,10 @@
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.List;
public class ParsedDateHistogram extends ParsedMultiBucketAggregation implements Histogram {
@@ -62,7 +62,7 @@ public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBuck
@Override
public Object getKey() {
if (key != null) {
- return new DateTime(key, DateTimeZone.UTC);
+ return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
return null;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
index b5bdba85b78ef..2b5e92ddcb3f9 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
@@ -30,9 +30,9 @@
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTime;
import java.io.IOException;
+import java.time.ZonedDateTime;
import java.util.Map;
public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeAggregationBuilder, RangeAggregator.Range> {
@@ -224,24 +224,24 @@ public DateRangeAggregationBuilder addUnboundedFrom(double from) {
* @param to
* the upper bound on the dates, exclusive
*/
- public DateRangeAggregationBuilder addRange(String key, DateTime from, DateTime to) {
+ public DateRangeAggregationBuilder addRange(String key, ZonedDateTime from, ZonedDateTime to) {
addRange(new RangeAggregator.Range(key, convertDateTime(from), convertDateTime(to)));
return this;
}
- private static Double convertDateTime(DateTime dateTime) {
+ private static Double convertDateTime(ZonedDateTime dateTime) {
if (dateTime == null) {
return null;
} else {
- return (double) dateTime.getMillis();
+ return (double) dateTime.toInstant().toEpochMilli();
}
}
/**
- * Same as {@link #addRange(String, DateTime, DateTime)} but the key will be
+ * Same as {@link #addRange(String, ZonedDateTime, ZonedDateTime)} but the key will be
* automatically generated based on <code>from</code> and <code>to</code>.
*/
- public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
+ public DateRangeAggregationBuilder addRange(ZonedDateTime from, ZonedDateTime to) {
return addRange(null, from, to);
}
@@ -253,16 +253,16 @@ public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
* @param to
* the upper bound on the dates, exclusive
*/
- public DateRangeAggregationBuilder addUnboundedTo(String key, DateTime to) {
+ public DateRangeAggregationBuilder addUnboundedTo(String key, ZonedDateTime to) {
addRange(new RangeAggregator.Range(key, null, convertDateTime(to)));
return this;
}
/**
- * Same as {@link #addUnboundedTo(String, DateTime)} but the key will be
+ * Same as {@link #addUnboundedTo(String, ZonedDateTime)} but the key will be
* computed automatically.
*/
- public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
+ public DateRangeAggregationBuilder addUnboundedTo(ZonedDateTime to) {
return addUnboundedTo(null, to);
}
@@ -274,16 +274,16 @@ public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
* @param from
* the lower bound on the distances, inclusive
*/
- public DateRangeAggregationBuilder addUnboundedFrom(String key, DateTime from) {
+ public DateRangeAggregationBuilder addUnboundedFrom(String key, ZonedDateTime from) {
addRange(new RangeAggregator.Range(key, convertDateTime(from), null));
return this;
}
/**
- * Same as {@link #addUnboundedFrom(String, DateTime)} but the key will be
+ * Same as {@link #addUnboundedFrom(String, ZonedDateTime)} but the key will be
* computed automatically.
*/
- public DateRangeAggregationBuilder addUnboundedFrom(DateTime from) {
+ public DateRangeAggregationBuilder addUnboundedFrom(ZonedDateTime from) {
return addUnboundedFrom(null, from);
}
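convertDateTime() above now reads the epoch millis from a ZonedDateTime via toInstant().toEpochMilli() rather than Joda's getMillis(). A short sketch of that conversion, using made-up range bounds:

    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    public class DateRangeBoundSketch {
        public static void main(String[] args) {
            ZonedDateTime from = ZonedDateTime.of(2019, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
            ZonedDateTime to = from.plusMonths(1);

            // range bounds end up as doubles of epoch millis, mirroring convertDateTime()
            double fromBound = (double) from.toInstant().toEpochMilli();
            double toBound = (double) to.toInstant().toEpochMilli();

            System.out.println(fromBound); // 1.5463008E12
            System.out.println(toBound);   // 1.5489792E12
        }
    }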
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
index 408c1325b85c9..a354aaeadbac0 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
@@ -24,10 +24,10 @@
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValueType;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
import java.util.List;
import java.util.Map;
@@ -48,12 +48,14 @@ public Bucket(String key, double from, double to, long docCount, InternalAggrega
@Override
public Object getFrom() {
- return Double.isInfinite(((Number) from).doubleValue()) ? null : new DateTime(((Number) from).longValue(), DateTimeZone.UTC);
+ return Double.isInfinite(((Number) from).doubleValue()) ? null :
+ Instant.ofEpochMilli(((Number) from).longValue()).atZone(ZoneOffset.UTC);
}
@Override
public Object getTo() {
- return Double.isInfinite(((Number) to).doubleValue()) ? null : new DateTime(((Number) to).longValue(), DateTimeZone.UTC);
+ return Double.isInfinite(((Number) to).doubleValue()) ? null :
+ Instant.ofEpochMilli(((Number) to).longValue()).atZone(ZoneOffset.UTC);
}
private Double internalGetFrom() {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
index 68adc41d23765..d4504e245541b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
@@ -21,10 +21,11 @@
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
public class ParsedDateRange extends ParsedRange {
@@ -59,11 +60,11 @@ public Object getTo() {
return doubleAsDateTime(to);
}
- private static DateTime doubleAsDateTime(Double d) {
+ private static ZonedDateTime doubleAsDateTime(Double d) {
if (d == null || Double.isInfinite(d)) {
return null;
}
- return new DateTime(d.longValue(), DateTimeZone.UTC);
+ return Instant.ofEpochMilli(d.longValue()).atZone(ZoneOffset.UTC);
}
static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
index b8785d0bf7045..68ec9085df52a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
@@ -21,9 +21,9 @@
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -34,7 +34,6 @@
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
@@ -139,9 +138,9 @@ protected PipelineAggregator createInternal(Map<String, Object> metaData) throws
}
Long xAxisUnits = null;
if (units != null) {
- DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(units);
+ Rounding.DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(units);
if (dateTimeUnit != null) {
- xAxisUnits = dateTimeUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
+ xAxisUnits = dateTimeUnit.getField().getBaseUnit().getDuration().toMillis();
} else {
TimeValue timeValue = TimeValue.parseTimeValue(units, null, getClass().getSimpleName() + ".unit");
if (timeValue != null) {
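The derivative's x-axis unit is now derived from the java.time field behind the rounding unit via getField().getBaseUnit().getDuration().toMillis(). A JDK-only sketch of what that chain yields for two ChronoField values; the specific fields are chosen for illustration and are not asserted to be the exact ones returned by Rounding.DateTimeUnit:

    import java.time.temporal.ChronoField;

    public class AxisUnitSketch {
        public static void main(String[] args) {
            // duration (in millis) of the unit backing a calendar field; months use the
            // average Gregorian month length since their real length varies
            long dayMillis = ChronoField.DAY_OF_MONTH.getBaseUnit().getDuration().toMillis();
            long monthMillis = ChronoField.MONTH_OF_YEAR.getBaseUnit().getDuration().toMillis();

            System.out.println(dayMillis);   // 86400000
            System.out.println(monthMillis); // 2629746000
        }
    }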
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
index fbc3081758f96..de112c427a751 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
@@ -19,19 +19,22 @@
package org.elasticsearch.search.aggregations.support;
+import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.Objects;
import java.util.function.BiFunction;
@@ -39,7 +42,7 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
private String fieldName;
private Object missing;
private Script script;
- private DateTimeZone timeZone;
+ private ZoneId timeZone;
private static final String NAME = "field_config";
@@ -62,16 +65,16 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
if (timezoneAware) {
parser.declareField(MultiValuesSourceFieldConfig.Builder::setTimeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
- return DateTimeZone.forID(p.text());
+ return ZoneId.of(p.text());
} else {
- return DateTimeZone.forOffsetHours(p.intValue());
+ return ZoneOffset.ofHours(p.intValue());
}
}, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
}
return parser;
};
- private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, DateTimeZone timeZone) {
+ private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, ZoneId timeZone) {
this.fieldName = fieldName;
this.missing = missing;
this.script = script;
@@ -82,7 +85,11 @@ public MultiValuesSourceFieldConfig(StreamInput in) throws IOException {
this.fieldName = in.readString();
this.missing = in.readGenericValue();
this.script = in.readOptionalWriteable(Script::new);
- this.timeZone = in.readOptionalTimeZone();
+ if (in.getVersion().before(Version.V_7_0_0)) {
+ this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone());
+ } else {
+ this.timeZone = in.readOptionalZoneId();
+ }
}
public Object getMissing() {
@@ -93,7 +100,7 @@ public Script getScript() {
return script;
}
- public DateTimeZone getTimeZone() {
+ public ZoneId getTimeZone() {
return timeZone;
}
@@ -106,7 +113,11 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeGenericValue(missing);
out.writeOptionalWriteable(script);
- out.writeOptionalTimeZone(timeZone);
+ if (out.getVersion().before(Version.V_7_0_0)) {
+ out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone));
+ } else {
+ out.writeOptionalZoneId(timeZone);
+ }
}
@Override
@@ -122,7 +133,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(ParseField.CommonFields.FIELD.getPreferredName(), fieldName);
}
if (timeZone != null) {
- builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone.getID());
+ builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone.getId());
}
builder.endObject();
return builder;
@@ -153,7 +164,7 @@ public static class Builder {
private String fieldName;
private Object missing = null;
private Script script = null;
- private DateTimeZone timeZone = null;
+ private ZoneId timeZone = null;
public String getFieldName() {
return fieldName;
@@ -182,11 +193,11 @@ public Builder setScript(Script script) {
return this;
}
- public DateTimeZone getTimeZone() {
+ public ZoneId getTimeZone() {
return timeZone;
}
- public Builder setTimeZone(DateTimeZone timeZone) {
+ public Builder setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
return this;
}
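The time_zone parsing above (and the matching change in ValuesSourceParserHelper further down) maps string values through ZoneId.of and whole-hour integers through ZoneOffset.ofHours. A quick sketch of what those JDK factory methods accept, with example inputs:

    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class TimeZoneParsingSketch {
        public static void main(String[] args) {
            // string values: region IDs and offset IDs both go through ZoneId.of
            ZoneId region = ZoneId.of("Europe/Paris");
            ZoneId offsetId = ZoneId.of("+05:30");

            // numeric values: whole hours go through ZoneOffset.ofHours
            ZoneOffset hours = ZoneOffset.ofHours(-8);

            System.out.println(region);   // Europe/Paris
            System.out.println(offsetId); // +05:30
            System.out.println(hours);    // -08:00
        }
    }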
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
index 25a90e581f00c..3cbd11288bffc 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
@@ -28,9 +28,9 @@
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneOffset;
public enum ValueType implements Writeable {
@@ -42,7 +42,7 @@ public enum ValueType implements Writeable {
DOUBLE((byte) 3, "float|double", "double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
NUMBER((byte) 4, "number", "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class,
- new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateTimeZone.UTC)),
+ new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ZoneOffset.UTC)),
IP((byte) 6, "ip", "ip", ValuesSourceType.BYTES, IndexFieldData.class, DocValueFormat.IP),
// TODO: what is the difference between "number" and "numeric"?
NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
index 040cc1b542f07..d3abe6f3169ee 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
@@ -18,8 +18,10 @@
*/
package org.elasticsearch.search.aggregations.support;
+import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
@@ -28,9 +30,9 @@
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Map;
import java.util.Objects;
@@ -81,7 +83,7 @@ public final AB subAggregations(Builder subFactories) {
private ValueType valueType = null;
private String format = null;
private Object missing = null;
- private DateTimeZone timeZone = null;
+ private ZoneId timeZone = null;
protected ValuesSourceConfig config;
protected ValuesSourceAggregationBuilder(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) {
@@ -144,8 +146,10 @@ private void read(StreamInput in) throws IOException {
}
format = in.readOptionalString();
missing = in.readGenericValue();
- if (in.readBoolean()) {
- timeZone = DateTimeZone.forID(in.readString());
+ if (in.getVersion().before(Version.V_7_0_0)) {
+ timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone());
+ } else {
+ timeZone = in.readOptionalZoneId();
}
}
@@ -167,10 +171,10 @@ protected final void doWriteTo(StreamOutput out) throws IOException {
}
out.writeOptionalString(format);
out.writeGenericValue(missing);
- boolean hasTimeZone = timeZone != null;
- out.writeBoolean(hasTimeZone);
- if (hasTimeZone) {
- out.writeString(timeZone.getID());
+ if (out.getVersion().before(Version.V_7_0_0)) {
+ out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone));
+ } else {
+ out.writeOptionalZoneId(timeZone);
}
innerWriteTo(out);
}
@@ -289,7 +293,7 @@ public Object missing() {
* Sets the time zone to use for this aggregation
*/
@SuppressWarnings("unchecked")
- public AB timeZone(DateTimeZone timeZone) {
+ public AB timeZone(ZoneId timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("[timeZone] must not be null: [" + name + "]");
}
@@ -300,7 +304,7 @@ public AB timeZone(DateTimeZone timeZone) {
/**
* Gets the time zone to use for this aggregation
*/
- public DateTimeZone timeZone() {
+ public ZoneId timeZone() {
return timeZone;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
index 39c53f39c7dac..82baa04fe8f1a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
@@ -32,7 +32,9 @@
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
-import org.joda.time.DateTimeZone;
+
+import java.time.ZoneId;
+import java.time.ZoneOffset;
/**
* A configuration that tells aggregations how to retrieve data from the index
@@ -48,7 +50,7 @@ public static ValuesSourceConfig resolve(
ValueType valueType,
String field, Script script,
Object missing,
- DateTimeZone timeZone,
+ ZoneId timeZone,
String format) {
if (field == null) {
@@ -121,7 +123,7 @@ private static AggregationScript.LeafFactory createScript(Script script, QuerySh
}
}
- private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, @Nullable DateTimeZone tz) {
+ private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, @Nullable ZoneId tz) {
if (valueType == null) {
return DocValueFormat.RAW; // we can't figure it out
}
@@ -130,7 +132,7 @@ private static DocValueFormat resolveFormat(@Nullable String format, @Nullable V
valueFormat = new DocValueFormat.Decimal(format);
}
if (valueFormat instanceof DocValueFormat.DateTime && format != null) {
- valueFormat = new DocValueFormat.DateTime(DateFormatter.forPattern(format), tz != null ? tz : DateTimeZone.UTC);
+ valueFormat = new DocValueFormat.DateTime(DateFormatter.forPattern(format), tz != null ? tz : ZoneOffset.UTC);
}
return valueFormat;
}
@@ -142,7 +144,7 @@ private static DocValueFormat resolveFormat(@Nullable String format, @Nullable V
private boolean unmapped = false;
private DocValueFormat format = DocValueFormat.RAW;
private Object missing;
- private DateTimeZone timeZone;
+ private ZoneId timeZone;
public ValuesSourceConfig(ValuesSourceType valueSourceType) {
this.valueSourceType = valueSourceType;
@@ -206,12 +208,12 @@ public Object missing() {
return this.missing;
}
- public ValuesSourceConfig timezone(final DateTimeZone timeZone) {
- this.timeZone= timeZone;
+ public ValuesSourceConfig timezone(final ZoneId timeZone) {
+ this.timeZone = timeZone;
return this;
}
- public DateTimeZone timezone() {
+ public ZoneId timezone() {
return this.timeZone;
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
index fc0a2f3a9fefe..24bdffaa3fa89 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
@@ -25,7 +25,9 @@
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
-import org.joda.time.DateTimeZone;
+
+import java.time.ZoneId;
+import java.time.ZoneOffset;
public final class ValuesSourceParserHelper {
@@ -91,9 +93,9 @@ private static void declareFields(
if (timezoneAware) {
objectParser.declareField(ValuesSourceAggregationBuilder::timeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
- return DateTimeZone.forID(p.text());
+ return ZoneId.of(p.text());
} else {
- return DateTimeZone.forOffsetHours(p.intValue());
+ return ZoneOffset.ofHours(p.intValue());
}
}, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
}
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
index c1692f606178e..cbd4ff659e599 100644
--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
+++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
@@ -28,7 +28,6 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -52,7 +51,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
public static final String CONTEXT_MODE_PARAM = "context_mode";
public static final String CONTEXT_MODE_SNAPSHOT = "SNAPSHOT";
- private static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("strictDateOptionalTime");
+ private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("strictDateOptionalTime");
private static final String SNAPSHOT = "snapshot";
private static final String UUID = "uuid";
private static final String INDICES = "indices";
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
index 6b8d1ab4fafb7..9f6f19596d080 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
@@ -25,7 +25,7 @@
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.time.DateFormatters;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -276,7 +276,7 @@ public void testRolloverOnExistingIndex() throws Exception {
public void testRolloverWithDateMath() {
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
assumeTrue("only works on the same day", now.plusMinutes(5).getDayOfYear() == now.getDayOfYear());
- String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1";
+ String index = "test-" + DateFormatter.forPattern("YYYY.MM.dd").format(now) + "-1";
String dateMathExp = "<test-{now/d}-1>";
assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get());
ensureGreen(index);
@@ -290,14 +290,14 @@ public void testRolloverWithDateMath() {
ensureGreen(index);
RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get();
assertThat(response.getOldIndex(), equalTo(index));
- assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000002"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
response = client().admin().indices().prepareRolloverIndex("test_alias").get();
- assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
- assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
+ assertThat(response.getOldIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000002"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000003"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
@@ -310,8 +310,8 @@ public void testRolloverWithDateMath() {
IndexMetaData.SETTING_INDEX_PROVIDED_NAME));
response = client().admin().indices().prepareRolloverIndex("test_alias").setNewIndexName("<test-{now/d}-000004>").get();
- assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
- assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-000004"));
+ assertThat(response.getOldIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000003"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM.dd").format(now) + "-000004"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
index 5c67e1bbe566c..2f52bd0d40aae 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
@@ -93,25 +93,25 @@ public void testExpression_MultiParts() throws Exception {
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
public void testExpression_CustomFormat() throws Exception {
- List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd}}>"));
+ List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>"));
assertThat(results.size(), equalTo(1));
assertThat(results.get(0),
- equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
+ equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
}
public void testExpression_EscapeStatic() throws Exception {
List<String> result = expressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>"));
assertThat(result.size(), equalTo(1));
assertThat(result.get(0),
- equalTo(".mar{v}el-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
+ equalTo(".mar{v}el-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
public void testExpression_EscapeDateFormat() throws Exception {
- List<String> result = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'YYYY}}>"));
+ List<String> result = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>"));
assertThat(result.size(), equalTo(1));
assertThat(result.get(0),
- equalTo(".marvel-" + DateTimeFormat.forPattern("'{year}'YYYY").print(new DateTime(context.getStartTime(), UTC))));
+ equalTo(".marvel-" + DateTimeFormat.forPattern("'{year}'yyyy").print(new DateTime(context.getStartTime(), UTC))));
}
public void testExpression_MixedArray() throws Exception {
@@ -150,10 +150,10 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception {
now = DateTime.now(UTC).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0);
}
Context context = new Context(this.context.getState(), this.context.getOptions(), now.getMillis());
- List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd|" + timeZone.getID() + "}}>"));
+ List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getID() + "}}>"));
assertThat(results.size(), equalTo(1));
logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0));
- assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.withZone(timeZone))));
+ assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(now.withZone(timeZone))));
}
public void testExpressionInvalidUnescaped() throws Exception {
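The YYYY to yyyy pattern changes in these tests matter because java.time interprets 'Y' as the week-based year, which can differ from the calendar year 'yyyy' around New Year, while Joda treated 'Y' as year of era. A JDK-only sketch of the divergence using ISO week fields:

    import java.time.LocalDate;
    import java.time.temporal.IsoFields;

    public class WeekYearSketch {
        public static void main(String[] args) {
            // 2018-12-31 is a Monday that belongs to ISO week 1 of 2019, so a
            // week-based-year pattern ("YYYY") rolls over before the calendar year ("yyyy")
            LocalDate date = LocalDate.of(2018, 12, 31);

            System.out.println(date.getYear());                      // 2018
            System.out.println(date.get(IsoFields.WEEK_BASED_YEAR)); // 2019
        }
    }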
diff --git a/server/src/test/java/org/elasticsearch/common/RoundingTests.java b/server/src/test/java/org/elasticsearch/common/RoundingTests.java
index 1664f67a44df9..9bc7c10abd8c8 100644
--- a/server/src/test/java/org/elasticsearch/common/RoundingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/RoundingTests.java
@@ -21,6 +21,7 @@
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.rounding.DateTimeUnit;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
@@ -317,7 +318,7 @@ public void testIntervalRounding_HalfDay_DST() {
}
/**
- * randomized test on {@link org.elasticsearch.common.rounding.Rounding.TimeIntervalRounding} with random interval and time zone offsets
+ * randomized test on {@link org.elasticsearch.common.Rounding.TimeIntervalRounding} with random interval and time zone offsets
*/
public void testIntervalRoundingRandom() {
for (int i = 0; i < 1000; i++) {
@@ -728,7 +729,7 @@ private static long time(String time) {
}
private static long time(String time, ZoneId zone) {
- TemporalAccessor accessor = DateFormatters.forPattern("date_optional_time").withZone(zone).parse(time);
+ TemporalAccessor accessor = DateFormatter.forPattern("date_optional_time").withZone(zone).parse(time);
return DateFormatters.toZonedDateTime(accessor).toInstant().toEpochMilli();
}
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
index b2370dadb604c..c7abea63be081 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
@@ -29,13 +29,11 @@
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.startsWith;
public class JavaJodaTimeDuellingTests extends ESTestCase {
@@ -64,11 +62,22 @@ public void testTimeZoneFormatting() {
formatter3.parse("20181126T121212.123-0830");
}
- public void testCustomTimeFormats() {
- assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
- assertSameDate("12/06", "dd/MM");
- assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
- }
+ // this test requires tests to run with -Djava.locale.providers=COMPAT in order to work
+// public void testCustomTimeFormats() {
+// assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
+// assertSameDate("12/06", "dd/MM");
+// assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
+//
+// // also ensure that locale based dates are the same
+// assertSameDate("Di., 05 Dez. 2000 02:55:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+// assertSameDate("Mi., 06 Dez. 2000 02:55:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+// assertSameDate("Do., 07 Dez. 2000 00:00:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+// assertSameDate("Fr., 08 Dez. 2000 00:00:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+//
+// DateTime dateTimeNow = DateTime.now(DateTimeZone.UTC);
+// ZonedDateTime javaTimeNow = Instant.ofEpochMilli(dateTimeNow.getMillis()).atZone(ZoneOffset.UTC);
+// assertSamePrinterOutput("E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"), javaTimeNow, dateTimeNow);
+// }
public void testDuellingFormatsValidParsing() {
assertSameDate("1522332219", "epoch_second");
@@ -133,10 +142,6 @@ public void testDuellingFormatsValidParsing() {
assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_millis");
assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_fraction");
- assertSameDate("10000", "date_optional_time");
- assertSameDate("10000T", "date_optional_time");
- assertSameDate("2018", "date_optional_time");
- assertSameDate("2018T", "date_optional_time");
assertSameDate("2018-05", "date_optional_time");
assertSameDate("2018-05-30", "date_optional_time");
assertSameDate("2018-05-30T20", "date_optional_time");
@@ -278,7 +283,7 @@ public void testDuellingFormatsValidParsing() {
// joda comes up with a different exception message here, so we have to adapt
assertJodaParseException("2012-W1-8", "week_date",
"Cannot parse \"2012-W1-8\": Value 8 for dayOfWeek must be in the range [1,7]");
- assertJavaTimeParseException("2012-W1-8", "week_date", "Text '2012-W1-8' could not be parsed");
+ assertJavaTimeParseException("2012-W1-8", "week_date");
assertSameDate("2012-W48-6T10:15:30.123Z", "week_date_time");
assertSameDate("2012-W48-6T10:15:30.123+0100", "week_date_time");
@@ -358,6 +363,7 @@ public void testDuelingStrictParsing() {
assertParseException("2018-12-1", "strict_date_optional_time");
assertParseException("2018-1-31", "strict_date_optional_time");
assertParseException("10000-01-31", "strict_date_optional_time");
+ assertSameDate("2010-01-05T02:00", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30Z", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30+0100", "strict_date_optional_time");
@@ -365,6 +371,7 @@ public void testDuelingStrictParsing() {
assertParseException("2018-12-31T10:15:3", "strict_date_optional_time");
assertParseException("2018-12-31T10:5:30", "strict_date_optional_time");
assertParseException("2018-12-31T9:15:30", "strict_date_optional_time");
+ assertSameDate("2015-01-04T00:00Z", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30.123Z", "strict_date_time");
assertSameDate("2018-12-31T10:15:30.123+0100", "strict_date_time");
assertSameDate("2018-12-31T10:15:30.123+01:00", "strict_date_time");
@@ -456,7 +463,7 @@ public void testDuelingStrictParsing() {
// joda comes up with a different exception message here, so we have to adapt
assertJodaParseException("2012-W01-8", "strict_week_date",
"Cannot parse \"2012-W01-8\": Value 8 for dayOfWeek must be in the range [1,7]");
- assertJavaTimeParseException("2012-W01-8", "strict_week_date", "Text '2012-W01-8' could not be parsed");
+ assertJavaTimeParseException("2012-W01-8", "strict_week_date");
assertSameDate("2012-W48-6T10:15:30.123Z", "strict_week_date_time");
assertSameDate("2012-W48-6T10:15:30.123+0100", "strict_week_date_time");
@@ -585,19 +592,55 @@ public void testSamePrinterOutput() {
assertSamePrinterOutput("strictYear", javaDate, jodaDate);
assertSamePrinterOutput("strictYearMonth", javaDate, jodaDate);
assertSamePrinterOutput("strictYearMonthDay", javaDate, jodaDate);
+ assertSamePrinterOutput("strict_date_optional_time", javaDate, jodaDate);
+ assertSamePrinterOutput("epoch_millis", javaDate, jodaDate);
+ }
+
+ public void testSamePrinterOutputWithTimeZone() {
+ String format = "strict_date_optional_time";
+ String dateInput = "2017-02-01T08:02:00.000-01:00";
+ DateFormatter javaFormatter = DateFormatter.forPattern(format);
+ TemporalAccessor javaDate = javaFormatter.parse(dateInput);
+
+ DateFormatter jodaFormatter = Joda.forPattern(format);
+ DateTime dateTime = jodaFormatter.parseJoda(dateInput);
+
+ String javaDateString = javaFormatter.withZone(ZoneOffset.ofHours(-1)).format(javaDate);
+ String jodaDateString = jodaFormatter.withZone(ZoneOffset.ofHours(-1)).formatJoda(dateTime);
+ String message = String.format(Locale.ROOT, "expected string representation to be equal for format [%s]: joda [%s], java [%s]",
+ format, jodaDateString, javaDateString);
+ assertThat(message, javaDateString, is(jodaDateString));
+ }
+
+ public void testDateFormatterWithLocale() {
+ Locale locale = randomLocale(random());
+ String pattern = randomBoolean() ? "strict_date_optional_time||date_time" : "date_time||strict_date_optional_time";
+ DateFormatter formatter = DateFormatter.forPattern(pattern).withLocale(locale);
+ assertThat(formatter.pattern(), is(pattern));
+ assertThat(formatter.locale(), is(locale));
}
public void testSeveralTimeFormats() {
- DateFormatter jodaFormatter = DateFormatter.forPattern("year_month_day||ordinal_date");
- DateFormatter javaFormatter = DateFormatter.forPattern("8year_month_day||ordinal_date");
- assertSameDate("2018-12-12", "year_month_day||ordinal_date", jodaFormatter, javaFormatter);
- assertSameDate("2018-128", "year_month_day||ordinal_date", jodaFormatter, javaFormatter);
+ {
+ String format = "year_month_day||ordinal_date";
+ DateFormatter jodaFormatter = Joda.forPattern(format);
+ DateFormatter javaFormatter = DateFormatter.forPattern(format);
+ assertSameDate("2018-12-12", format, jodaFormatter, javaFormatter);
+ assertSameDate("2018-128", format, jodaFormatter, javaFormatter);
+ }
+ {
+ String format = "strictDateOptionalTime||dd-MM-yyyy";
+ DateFormatter jodaFormatter = Joda.forPattern(format);
+ DateFormatter javaFormatter = DateFormatter.forPattern(format);
+ assertSameDate("31-01-2014", format, jodaFormatter, javaFormatter);
+ }
}
private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) {
assertThat(jodaDate.getMillis(), is(javaDate.toInstant().toEpochMilli()));
- String javaTimeOut = DateFormatters.forPattern(format).format(javaDate);
- String jodaTimeOut = DateFormatter.forPattern(format).formatJoda(jodaDate);
+ String javaTimeOut = DateFormatter.forPattern(format).format(javaDate);
+ String jodaTimeOut = Joda.forPattern(format).formatJoda(jodaDate);
+
if (JavaVersion.current().getVersion().get(0) == 8 && javaTimeOut.endsWith(".0")
&& (format.equals("epoch_second") || format.equals("epoch_millis"))) {
// java 8 has a bug in DateTimeFormatter usage when printing dates that rely on isSupportedBy for fields, which is
@@ -611,7 +654,7 @@ private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, Date
private void assertSameDate(String input, String format) {
DateFormatter jodaFormatter = Joda.forPattern(format);
- DateFormatter javaFormatter = DateFormatters.forPattern(format);
+ DateFormatter javaFormatter = DateFormatter.forPattern(format);
assertSameDate(input, format, jodaFormatter, javaFormatter);
}
@@ -629,7 +672,7 @@ private void assertSameDate(String input, String format, DateFormatter jodaForma
private void assertParseException(String input, String format) {
assertJodaParseException(input, format, "Invalid format: \"" + input);
- assertJavaTimeParseException(input, format, "Text '" + input + "' could not be parsed");
+ assertJavaTimeParseException(input, format);
}
private void assertJodaParseException(String input, String format, String expectedMessage) {
@@ -638,9 +681,10 @@ private void assertJodaParseException(String input, String format, String expect
assertThat(e.getMessage(), containsString(expectedMessage));
}
- private void assertJavaTimeParseException(String input, String format, String expectedMessage) {
- DateFormatter javaTimeFormatter = DateFormatters.forPattern(format);
- DateTimeParseException dateTimeParseException = expectThrows(DateTimeParseException.class, () -> javaTimeFormatter.parse(input));
- assertThat(dateTimeParseException.getMessage(), startsWith(expectedMessage));
+ private void assertJavaTimeParseException(String input, String format) {
+ DateFormatter javaTimeFormatter = DateFormatter.forPattern(format);
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> javaTimeFormatter.parse(input));
+ assertThat(e.getMessage(), containsString(input));
+ assertThat(e.getMessage(), containsString(format));
}
}
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java
index e502dfc6f963f..19aea3f19ba3b 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java
@@ -26,6 +26,7 @@
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTimeZone;
+import java.time.Instant;
import java.time.ZoneId;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.LongSupplier;
@@ -35,7 +36,7 @@
public class JodaDateMathParserTests extends ESTestCase {
- DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime||epoch_millis");
+ DateFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis");
DateMathParser parser = formatter.toDateMathParser();
void assertDateMathEquals(String toTest, String expected) {
@@ -43,12 +44,12 @@ void assertDateMathEquals(String toTest, String expected) {
}
void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, DateTimeZone timeZone) {
- long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone);
+ long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone).toEpochMilli();
assertDateEquals(gotMillis, toTest, expected);
}
void assertDateEquals(long gotMillis, String original, String expected) {
- long expectedMillis = parser.parse(expected, () -> 0);
+ long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli();
if (gotMillis != expectedMillis) {
fail("Date math not equal\n" +
"Original : " + original + "\n" +
@@ -147,7 +148,7 @@ public void testMultipleAdjustments() {
public void testNow() {
- final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null);
+ final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null).toEpochMilli();
assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null);
assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null);
@@ -164,10 +165,10 @@ public void testRoundingPreservesEpochAsBaseDate() {
DateMathParser parser = formatter.toDateMathParser();
assertEquals(
this.formatter.parseMillis("1970-01-01T04:52:20.000Z"),
- parser.parse("04:52:20", () -> 0, false, (ZoneId) null));
+ parser.parse("04:52:20", () -> 0, false, (ZoneId) null).toEpochMilli());
assertEquals(
this.formatter.parseMillis("1970-01-01T04:52:20.999Z"),
- parser.parse("04:52:20", () -> 0, true, (ZoneId) null));
+ parser.parse("04:52:20", () -> 0, true, (ZoneId) null).toEpochMilli());
}
// Implicit rounding happening when parts of the date are not specified
@@ -185,9 +186,9 @@ public void testImplicitRounding() {
assertDateMathEquals("2014-11-18T09:20", "2014-11-18T08:20:59.999Z", 0, true, DateTimeZone.forID("CET"));
// implicit rounding with explicit timezone in the date format
- DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-ddZ");
+ DateFormatter formatter = Joda.forPattern("yyyy-MM-ddZ");
DateMathParser parser = formatter.toDateMathParser();
- long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
+ Instant time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time);
time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time);
@@ -261,7 +262,7 @@ public void testTimestamps() {
// also check other time units
JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_second"));
- long datetime = parser.parse("1418248078", () -> 0);
+ long datetime = parser.parse("1418248078", () -> 0).toEpochMilli();
assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");
// a timestamp before 10000 is a year
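
The .toEpochMilli() calls added throughout this file follow from the central API change of this PR: DateMathParser.parse(...) now returns a java.time.Instant instead of epoch milliseconds as a long. A minimal sketch of the new calling convention, assuming the Joda, DateFormatter and DateMathParser classes from this branch (package locations inferred from the imports above; the date-math expression is illustrative):

import java.time.Instant;
import java.time.ZoneId;

import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;

public class DateMathSketch {
    public static void main(String[] args) {
        DateFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis");
        DateMathParser parser = formatter.toDateMathParser();
        // parse() now yields an Instant; callers convert explicitly when they need epoch millis
        Instant instant = parser.parse("2014-11-18T14:27:32||+1M/d", () -> 0L, false, (ZoneId) null);
        System.out.println(instant.toEpochMilli());
    }
}
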
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java b/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java
index fde9d73fae892..003785b3c87b3 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java
@@ -26,15 +26,18 @@
import java.time.ZoneOffset;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+
public class JodaTests extends ESTestCase {
public void testBasicTTimePattern() {
- DateFormatter formatter1 = DateFormatter.forPattern("basic_t_time");
+ DateFormatter formatter1 = Joda.forPattern("basic_t_time");
assertEquals(formatter1.pattern(), "basic_t_time");
assertEquals(formatter1.zone(), ZoneOffset.UTC);
- DateFormatter formatter2 = DateFormatter.forPattern("basicTTime");
+ DateFormatter formatter2 = Joda.forPattern("basicTTime");
assertEquals(formatter2.pattern(), "basicTTime");
assertEquals(formatter2.zone(), ZoneOffset.UTC);
@@ -42,9 +45,25 @@ public void testBasicTTimePattern() {
assertEquals("T102030.040Z", formatter1.formatJoda(dt));
assertEquals("T102030.040Z", formatter1.formatJoda(dt));
- expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_t_Time"));
- expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_T_Time"));
- expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_T_time"));
+ expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_t_Time"));
+ expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_T_Time"));
+ expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_T_time"));
}
+ public void testEqualsAndHashcode() {
+ String format = randomFrom("yyyy/MM/dd HH:mm:ss", "basic_t_time");
+ JodaDateFormatter first = Joda.forPattern(format);
+ JodaDateFormatter second = Joda.forPattern(format);
+ JodaDateFormatter third = Joda.forPattern(" HH:mm:ss, yyyy/MM/dd");
+
+ assertThat(first, is(second));
+ assertThat(second, is(first));
+ assertThat(first, is(not(third)));
+ assertThat(second, is(not(third)));
+
+ assertThat(first.hashCode(), is(second.hashCode()));
+ assertThat(second.hashCode(), is(first.hashCode()));
+ assertThat(first.hashCode(), is(not(third.hashCode())));
+ assertThat(second.hashCode(), is(not(third.hashCode())));
+ }
}
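
The new testEqualsAndHashcode covers the equals/hashCode support that JodaDateFormatter gains in this change: formatters built from the same pattern compare equal and share a hash code, while formatters built from different patterns do not. A minimal sketch of that contract, assuming the Joda and JodaDateFormatter classes from this branch:

import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.joda.JodaDateFormatter;

public class JodaFormatterEqualitySketch {
    public static void main(String[] args) {
        JodaDateFormatter first = Joda.forPattern("basic_t_time");
        JodaDateFormatter second = Joda.forPattern("basic_t_time");
        JodaDateFormatter third = Joda.forPattern("yyyy/MM/dd HH:mm:ss");

        // same pattern: equal with matching hash codes; different pattern: not equal
        System.out.println(first.equals(second) && first.hashCode() == second.hashCode()); // true
        System.out.println(first.equals(third));                                           // false
    }
}
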
diff --git a/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java b/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java
deleted file mode 100644
index b6f1b1b650a6f..0000000000000
--- a/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java
+++ /dev/null
@@ -1,800 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.joda;
-
-import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.mapper.RootObjectMapper;
-import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.LocalDateTime;
-import org.joda.time.MutableDateTime;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.format.DateTimeFormatterBuilder;
-import org.joda.time.format.DateTimeParser;
-import org.joda.time.format.ISODateTimeFormat;
-
-import java.util.Date;
-import java.util.Locale;
-
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.endsWith;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-
-public class SimpleJodaTests extends ESTestCase {
- public void testMultiParsers() {
- DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
- DateTimeParser[] parsers = new DateTimeParser[3];
- parsers[0] = DateTimeFormat.forPattern("MM/dd/yyyy").withZone(DateTimeZone.UTC).getParser();
- parsers[1] = DateTimeFormat.forPattern("MM-dd-yyyy").withZone(DateTimeZone.UTC).getParser();
- parsers[2] = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZone(DateTimeZone.UTC).getParser();
- builder.append(DateTimeFormat.forPattern("MM/dd/yyyy").withZone(DateTimeZone.UTC).getPrinter(), parsers);
-
- DateTimeFormatter formatter = builder.toFormatter();
-
- formatter.parseMillis("2009-11-15 14:12:12");
- }
-
- public void testIsoDateFormatDateTimeNoMillisUTC() {
- DateTimeFormatter formatter = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC);
- long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
-
- assertThat(millis, equalTo(0L));
- }
-
- public void testUpperBound() {
- MutableDateTime dateTime = new MutableDateTime(3000, 12, 31, 23, 59, 59, 999, DateTimeZone.UTC);
- DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
-
- String value = "2000-01-01";
- int i = formatter.parseInto(dateTime, value, 0);
- assertThat(i, equalTo(value.length()));
- assertThat(dateTime.toString(), equalTo("2000-01-01T23:59:59.999Z"));
- }
-
- public void testIsoDateFormatDateOptionalTimeUTC() {
- DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
- long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
- assertThat(millis, equalTo(0L));
- millis = formatter.parseMillis("1970-01-01T00:00:00.001Z");
- assertThat(millis, equalTo(1L));
- millis = formatter.parseMillis("1970-01-01T00:00:00.1Z");
- assertThat(millis, equalTo(100L));
- millis = formatter.parseMillis("1970-01-01T00:00:00.1");
- assertThat(millis, equalTo(100L));
- millis = formatter.parseMillis("1970-01-01T00:00:00");
- assertThat(millis, equalTo(0L));
- millis = formatter.parseMillis("1970-01-01");
- assertThat(millis, equalTo(0L));
-
- millis = formatter.parseMillis("1970");
- assertThat(millis, equalTo(0L));
-
- try {
- formatter.parseMillis("1970 kuku");
- fail("formatting should fail");
- } catch (IllegalArgumentException e) {
- // all is well
- }
-
- // test offset in format
- millis = formatter.parseMillis("1970-01-01T00:00:00-02:00");
- assertThat(millis, equalTo(TimeValue.timeValueHours(2).millis()));
- }
-
- public void testIsoVsCustom() {
- DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
- long millis = formatter.parseMillis("1970-01-01T00:00:00");
- assertThat(millis, equalTo(0L));
-
- formatter = DateTimeFormat.forPattern("yyyy/MM/dd HH:mm:ss").withZone(DateTimeZone.UTC);
- millis = formatter.parseMillis("1970/01/01 00:00:00");
- assertThat(millis, equalTo(0L));
-
- DateFormatter formatter2 = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss");
- millis = formatter2.parseMillis("1970/01/01 00:00:00");
- assertThat(millis, equalTo(0L));
- }
-
- public void testWriteAndParse() {
- DateTimeFormatter dateTimeWriter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
- DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
- Date date = new Date();
- assertThat(formatter.parseMillis(dateTimeWriter.print(date.getTime())), equalTo(date.getTime()));
- }
-
- public void testSlashInFormat() {
- DateFormatter formatter = DateFormatter.forPattern("MM/yyyy");
- formatter.parseMillis("01/2001");
-
- DateFormatter formatter2 = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss");
- long millis = formatter2.parseMillis("1970/01/01 00:00:00");
- formatter2.formatMillis(millis);
-
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
- formatter2.parseMillis("1970/01/01"));
- }
-
- public void testMultipleFormats() {
- DateFormatter formatter = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
- long millis = formatter.parseMillis("1970/01/01 00:00:00");
- assertThat("1970/01/01 00:00:00", is(formatter.formatMillis(millis)));
- }
-
- public void testMultipleDifferentFormats() {
- DateFormatter formatter = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
- String input = "1970/01/01 00:00:00";
- long millis = formatter.parseMillis(input);
- assertThat(input, is(formatter.formatMillis(millis)));
-
- DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||dateOptionalTime");
- DateFormatter.forPattern("dateOptionalTime||yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
- DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||dateOptionalTime||yyyy/MM/dd");
- DateFormatter.forPattern("date_time||date_time_no_millis");
- DateFormatter.forPattern(" date_time || date_time_no_millis");
- }
-
- public void testInvalidPatterns() {
- expectInvalidPattern("does_not_exist_pattern", "Invalid format: [does_not_exist_pattern]: Illegal pattern component: o");
- expectInvalidPattern("OOOOO", "Invalid format: [OOOOO]: Illegal pattern component: OOOOO");
- expectInvalidPattern(null, "No date pattern provided");
- expectInvalidPattern("", "No date pattern provided");
- expectInvalidPattern(" ", "No date pattern provided");
- expectInvalidPattern("||date_time_no_millis", "No date pattern provided");
- expectInvalidPattern("date_time_no_millis||", "No date pattern provided");
- }
-
- private void expectInvalidPattern(String pattern, String errorMessage) {
- try {
- DateFormatter.forPattern(pattern);
- fail("Pattern " + pattern + " should have thrown an exception but did not");
- } catch (IllegalArgumentException e) {
- assertThat(e.getMessage(), containsString(errorMessage));
- }
- }
-
- public void testRounding() {
- long TIME = utcTimeInMillis("2009-02-03T01:01:01");
- MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
- time.setMillis(TIME);
- assertThat(time.monthOfYear().roundFloor().toString(), equalTo("2009-02-01T00:00:00.000Z"));
- time.setMillis(TIME);
- assertThat(time.hourOfDay().roundFloor().toString(), equalTo("2009-02-03T01:00:00.000Z"));
- time.setMillis(TIME);
- assertThat(time.dayOfMonth().roundFloor().toString(), equalTo("2009-02-03T00:00:00.000Z"));
- }
-
- public void testRoundingSetOnTime() {
- MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
- time.setRounding(time.getChronology().monthOfYear(), MutableDateTime.ROUND_FLOOR);
- time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
- assertThat(time.toString(), equalTo("2009-02-01T00:00:00.000Z"));
- assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-01T00:00:00.000Z")));
-
- time.setMillis(utcTimeInMillis("2009-05-03T01:01:01"));
- assertThat(time.toString(), equalTo("2009-05-01T00:00:00.000Z"));
- assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-05-01T00:00:00.000Z")));
-
- time = new MutableDateTime(DateTimeZone.UTC);
- time.setRounding(time.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);
- time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
- assertThat(time.toString(), equalTo("2009-02-03T00:00:00.000Z"));
- assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-03T00:00:00.000Z")));
-
- time.setMillis(utcTimeInMillis("2009-02-02T23:01:01"));
- assertThat(time.toString(), equalTo("2009-02-02T00:00:00.000Z"));
- assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-02T00:00:00.000Z")));
-
- time = new MutableDateTime(DateTimeZone.UTC);
- time.setRounding(time.getChronology().weekOfWeekyear(), MutableDateTime.ROUND_FLOOR);
- time.setMillis(utcTimeInMillis("2011-05-05T01:01:01"));
- assertThat(time.toString(), equalTo("2011-05-02T00:00:00.000Z"));
- assertThat(time.getMillis(), equalTo(utcTimeInMillis("2011-05-02T00:00:00.000Z")));
- }
-
- public void testRoundingWithTimeZone() {
- MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
- time.setZone(DateTimeZone.forOffsetHours(-2));
- time.setRounding(time.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);
-
- MutableDateTime utcTime = new MutableDateTime(DateTimeZone.UTC);
- utcTime.setRounding(utcTime.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);
-
- time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
- utcTime.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
-
- assertThat(time.toString(), equalTo("2009-02-02T00:00:00.000-02:00"));
- assertThat(utcTime.toString(), equalTo("2009-02-03T00:00:00.000Z"));
- // the time is on the 2nd, and utcTime is on the 3rd, but, because time already encapsulates
- // time zone, the millis diff is not 24, but 22 hours
- assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis()));
-
- time.setMillis(utcTimeInMillis("2009-02-04T01:01:01"));
- utcTime.setMillis(utcTimeInMillis("2009-02-04T01:01:01"));
- assertThat(time.toString(), equalTo("2009-02-03T00:00:00.000-02:00"));
- assertThat(utcTime.toString(), equalTo("2009-02-04T00:00:00.000Z"));
- assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis()));
- }
-
- public void testThatEpochsCanBeParsed() {
- boolean parseMilliSeconds = randomBoolean();
-
- // epoch: 1433144433655 => date: Mon Jun 1 09:40:33.655 CEST 2015
- DateFormatter formatter = DateFormatter.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second");
- DateTime dateTime = formatter.parseJoda(parseMilliSeconds ? "1433144433655" : "1433144433");
-
- assertThat(dateTime.getYear(), is(2015));
- assertThat(dateTime.getDayOfMonth(), is(1));
- assertThat(dateTime.getMonthOfYear(), is(6));
- assertThat(dateTime.getHourOfDay(), is(7)); // utc timezone, +2 offset due to CEST
- assertThat(dateTime.getMinuteOfHour(), is(40));
- assertThat(dateTime.getSecondOfMinute(), is(33));
-
- if (parseMilliSeconds) {
- assertThat(dateTime.getMillisOfSecond(), is(655));
- } else {
- assertThat(dateTime.getMillisOfSecond(), is(0));
- }
-
- // test floats get truncated
- String epochFloatValue = String.format(Locale.US, "%d.%d", dateTime.getMillis() / (parseMilliSeconds ? 1L : 1000L),
- randomNonNegativeLong());
- assertThat(formatter.parseJoda(epochFloatValue).getMillis(), is(dateTime.getMillis()));
- }
-
- public void testThatNegativeEpochsCanBeParsed() {
- // problem: negative epochs can be arbitrary in size...
- boolean parseMilliSeconds = randomBoolean();
- DateFormatter formatter = DateFormatter.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second");
- DateTime dateTime = formatter.parseJoda("-10000");
-
- assertThat(dateTime.getYear(), is(1969));
- assertThat(dateTime.getMonthOfYear(), is(12));
- assertThat(dateTime.getDayOfMonth(), is(31));
- if (parseMilliSeconds) {
- assertThat(dateTime.getHourOfDay(), is(23)); // utc timezone, +2 offset due to CEST
- assertThat(dateTime.getMinuteOfHour(), is(59));
- assertThat(dateTime.getSecondOfMinute(), is(50));
- } else {
- assertThat(dateTime.getHourOfDay(), is(21)); // utc timezone, +2 offset due to CEST
- assertThat(dateTime.getMinuteOfHour(), is(13));
- assertThat(dateTime.getSecondOfMinute(), is(20));
- }
-
- // test floats get truncated
- String epochFloatValue = String.format(Locale.US, "%d.%d", dateTime.getMillis() / (parseMilliSeconds ? 1L : 1000L),
- randomNonNegativeLong());
- assertThat(formatter.parseJoda(epochFloatValue).getMillis(), is(dateTime.getMillis()));
-
- // every negative epoch must be parsed, no matter if exact the size or bigger
- if (parseMilliSeconds) {
- formatter.parseJoda("-100000000");
- formatter.parseJoda("-999999999999");
- formatter.parseJoda("-1234567890123");
- formatter.parseJoda("-1234567890123456789");
-
- formatter.parseJoda("-1234567890123.9999");
- formatter.parseJoda("-1234567890123456789.9999");
- } else {
- formatter.parseJoda("-100000000");
- formatter.parseJoda("-1234567890");
- formatter.parseJoda("-1234567890123456");
-
- formatter.parseJoda("-1234567890.9999");
- formatter.parseJoda("-1234567890123456.9999");
- }
-
- assertWarnings("Use of negative values" +
- " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
- }
-
- public void testForInvalidDatesInEpochSecond() {
- DateFormatter formatter = DateFormatter.forPattern("epoch_second");
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
- formatter.parseJoda(randomFrom("invalid date", "12345678901234567", "12345678901234567890")));
- assertThat(e.getMessage(), containsString("Invalid format"));
- }
-
- public void testForInvalidDatesInEpochMillis() {
- DateFormatter formatter = DateFormatter.forPattern("epoch_millis");
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
- formatter.parseJoda(randomFrom("invalid date", "12345678901234567890")));
- assertThat(e.getMessage(), containsString("Invalid format"));
- }
-
- public void testForInvalidTimeZoneWithEpochSeconds() {
- DateTimeFormatter dateTimeFormatter = new DateTimeFormatterBuilder()
- .append(new Joda.EpochTimeParser(false))
- .toFormatter()
- .withZone(DateTimeZone.forOffsetHours(1))
- .withLocale(Locale.ROOT);
- DateFormatter formatter =
- new JodaDateFormatter("epoch_seconds", dateTimeFormatter, dateTimeFormatter);
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
- formatter.parseJoda("1433144433655"));
- assertThat(e.getMessage(), containsString("time_zone must be UTC"));
- }
-
- public void testForInvalidTimeZoneWithEpochMillis() {
- DateTimeFormatter dateTimeFormatter = new DateTimeFormatterBuilder()
- .append(new Joda.EpochTimeParser(true))
- .toFormatter()
- .withZone(DateTimeZone.forOffsetHours(1))
- .withLocale(Locale.ROOT);
- DateFormatter formatter =
- new JodaDateFormatter("epoch_millis", dateTimeFormatter, dateTimeFormatter);
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
- formatter.parseJoda("1433144433"));
- assertThat(e.getMessage(), containsString("time_zone must be UTC"));
- }
-
- public void testThatEpochParserIsPrinter() {
- JodaDateFormatter formatter = Joda.forPattern("epoch_millis");
- assertThat(formatter.parser.isPrinter(), is(true));
- assertThat(formatter.printer.isPrinter(), is(true));
-
- JodaDateFormatter epochSecondFormatter = Joda.forPattern("epoch_second");
- assertThat(epochSecondFormatter.parser.isPrinter(), is(true));
- assertThat(epochSecondFormatter.printer.isPrinter(), is(true));
- }
-
- public void testThatEpochTimePrinterWorks() {
- StringBuffer buffer = new StringBuffer();
- LocalDateTime now = LocalDateTime.now();
-
- Joda.EpochTimePrinter epochTimePrinter = new Joda.EpochTimePrinter(false);
- epochTimePrinter.printTo(buffer, now, Locale.ROOT);
- assertThat(buffer.length(), is(10));
- // only check the last digit, as seconds go from 0-99 in the unix timestamp and don't stop at 60
- assertThat(buffer.toString(), endsWith(String.valueOf(now.getSecondOfMinute() % 10)));
-
- buffer = new StringBuffer();
- Joda.EpochTimePrinter epochMilliSecondTimePrinter = new Joda.EpochTimePrinter(true);
- epochMilliSecondTimePrinter.printTo(buffer, now, Locale.ROOT);
- assertThat(buffer.length(), is(13));
- assertThat(buffer.toString(), endsWith(String.valueOf(now.getMillisOfSecond())));
- }
-
- public void testThatEpochParserIsIdempotent() {
- DateFormatter formatter = DateFormatter.forPattern("epoch_millis");
- DateTime dateTime = formatter.parseJoda("1234567890123");
- assertThat(dateTime.getMillis(), is(1234567890123L));
- dateTime = formatter.parseJoda("1234567890456");
- assertThat(dateTime.getMillis(), is(1234567890456L));
- dateTime = formatter.parseJoda("1234567890789");
- assertThat(dateTime.getMillis(), is(1234567890789L));
- dateTime = formatter.parseJoda("1234567890123456789");
- assertThat(dateTime.getMillis(), is(1234567890123456789L));
-
- DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second");
- DateTime secondsDateTime = secondsFormatter.parseJoda("1234567890");
- assertThat(secondsDateTime.getMillis(), is(1234567890000L));
- secondsDateTime = secondsFormatter.parseJoda("1234567890");
- assertThat(secondsDateTime.getMillis(), is(1234567890000L));
- secondsDateTime = secondsFormatter.parseJoda("1234567890");
- assertThat(secondsDateTime.getMillis(), is(1234567890000L));
- secondsDateTime = secondsFormatter.parseJoda("1234567890123456");
- assertThat(secondsDateTime.getMillis(), is(1234567890123456000L));
- }
-
- public void testThatDefaultFormatterChecksForCorrectYearLength() throws Exception {
- // if no strict version is tested, this means the date format is already strict by itself
- // yyyyMMdd
- assertValidDateFormatParsing("basicDate", "20140303");
- assertDateFormatParsingThrowingException("basicDate", "2010303");
-
- // yyyyMMdd’T'HHmmss.SSSZ
- assertValidDateFormatParsing("basicDateTime", "20140303T124343.123Z");
- assertValidDateFormatParsing("basicDateTime", "00050303T124343.123Z");
- assertDateFormatParsingThrowingException("basicDateTime", "50303T124343.123Z");
-
- // yyyyMMdd’T'HHmmssZ
- assertValidDateFormatParsing("basicDateTimeNoMillis", "20140303T124343Z");
- assertValidDateFormatParsing("basicDateTimeNoMillis", "00050303T124343Z");
- assertDateFormatParsingThrowingException("basicDateTimeNoMillis", "50303T124343Z");
-
- // yyyyDDD
- assertValidDateFormatParsing("basicOrdinalDate", "0005165");
- assertDateFormatParsingThrowingException("basicOrdinalDate", "5165");
-
- // yyyyDDD’T'HHmmss.SSSZ
- assertValidDateFormatParsing("basicOrdinalDateTime", "0005165T124343.123Z");
- assertValidDateFormatParsing("basicOrdinalDateTime", "0005165T124343.123Z");
- assertDateFormatParsingThrowingException("basicOrdinalDateTime", "5165T124343.123Z");
-
- // yyyyDDD’T'HHmmssZ
- assertValidDateFormatParsing("basicOrdinalDateTimeNoMillis", "0005165T124343Z");
- assertValidDateFormatParsing("basicOrdinalDateTimeNoMillis", "0005165T124343Z");
- assertDateFormatParsingThrowingException("basicOrdinalDateTimeNoMillis", "5165T124343Z");
-
- // HHmmss.SSSZ
- assertValidDateFormatParsing("basicTime", "090909.123Z");
- assertDateFormatParsingThrowingException("basicTime", "90909.123Z");
-
- // HHmmssZ
- assertValidDateFormatParsing("basicTimeNoMillis", "090909Z");
- assertDateFormatParsingThrowingException("basicTimeNoMillis", "90909Z");
-
- // 'T’HHmmss.SSSZ
- assertValidDateFormatParsing("basicTTime", "T090909.123Z");
- assertDateFormatParsingThrowingException("basicTTime", "T90909.123Z");
-
- // T’HHmmssZ
- assertValidDateFormatParsing("basicTTimeNoMillis", "T090909Z");
- assertDateFormatParsingThrowingException("basicTTimeNoMillis", "T90909Z");
-
- // xxxx’W'wwe
- assertValidDateFormatParsing("basicWeekDate", "0005W414");
- assertValidDateFormatParsing("basicWeekDate", "5W414", "0005W414");
- assertDateFormatParsingThrowingException("basicWeekDate", "5W14");
-
- assertValidDateFormatParsing("strictBasicWeekDate", "0005W414");
- assertDateFormatParsingThrowingException("strictBasicWeekDate", "0005W47");
- assertDateFormatParsingThrowingException("strictBasicWeekDate", "5W414");
- assertDateFormatParsingThrowingException("strictBasicWeekDate", "5W14");
-
- // xxxx’W'wwe’T'HHmmss.SSSZ
- assertValidDateFormatParsing("basicWeekDateTime", "0005W414T124343.123Z");
- assertValidDateFormatParsing("basicWeekDateTime", "5W414T124343.123Z", "0005W414T124343.123Z");
- assertDateFormatParsingThrowingException("basicWeekDateTime", "5W14T124343.123Z");
-
- assertValidDateFormatParsing("strictBasicWeekDateTime", "0005W414T124343.123Z");
- assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "0005W47T124343.123Z");
- assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "5W414T124343.123Z");
- assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "5W14T124343.123Z");
-
- // xxxx’W'wwe’T'HHmmssZ
- assertValidDateFormatParsing("basicWeekDateTimeNoMillis", "0005W414T124343Z");
- assertValidDateFormatParsing("basicWeekDateTimeNoMillis", "5W414T124343Z", "0005W414T124343Z");
- assertDateFormatParsingThrowingException("basicWeekDateTimeNoMillis", "5W14T124343Z");
-
- assertValidDateFormatParsing("strictBasicWeekDateTimeNoMillis", "0005W414T124343Z");
- assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "0005W47T124343Z");
- assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "5W414T124343Z");
- assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "5W14T124343Z");
-
- // yyyy-MM-dd
- assertValidDateFormatParsing("date", "0005-06-03");
- assertValidDateFormatParsing("date", "5-6-3", "0005-06-03");
-
- assertValidDateFormatParsing("strictDate", "0005-06-03");
- assertDateFormatParsingThrowingException("strictDate", "5-6-3");
- assertDateFormatParsingThrowingException("strictDate", "0005-06-3");
- assertDateFormatParsingThrowingException("strictDate", "0005-6-03");
- assertDateFormatParsingThrowingException("strictDate", "5-06-03");
-
- // yyyy-MM-dd'T'HH
- assertValidDateFormatParsing("dateHour", "0005-06-03T12");
- assertValidDateFormatParsing("dateHour", "5-6-3T1", "0005-06-03T01");
-
- assertValidDateFormatParsing("strictDateHour", "0005-06-03T12");
- assertDateFormatParsingThrowingException("strictDateHour", "5-6-3T1");
-
- // yyyy-MM-dd'T'HH:mm
- assertValidDateFormatParsing("dateHourMinute", "0005-06-03T12:12");
- assertValidDateFormatParsing("dateHourMinute", "5-6-3T12:1", "0005-06-03T12:01");
-
- assertValidDateFormatParsing("strictDateHourMinute", "0005-06-03T12:12");
- assertDateFormatParsingThrowingException("strictDateHourMinute", "5-6-3T12:1");
-
- // yyyy-MM-dd'T'HH:mm:ss
- assertValidDateFormatParsing("dateHourMinuteSecond", "0005-06-03T12:12:12");
- assertValidDateFormatParsing("dateHourMinuteSecond", "5-6-3T12:12:1", "0005-06-03T12:12:01");
-
- assertValidDateFormatParsing("strictDateHourMinuteSecond", "0005-06-03T12:12:12");
- assertDateFormatParsingThrowingException("strictDateHourMinuteSecond", "5-6-3T12:12:1");
-
- // yyyy-MM-dd’T'HH:mm:ss.SSS
- assertValidDateFormatParsing("dateHourMinuteSecondFraction", "0005-06-03T12:12:12.123");
- assertValidDateFormatParsing("dateHourMinuteSecondFraction", "5-6-3T12:12:1.123", "0005-06-03T12:12:01.123");
- assertValidDateFormatParsing("dateHourMinuteSecondFraction", "5-6-3T12:12:1.1", "0005-06-03T12:12:01.100");
-
- assertValidDateFormatParsing("strictDateHourMinuteSecondFraction", "0005-06-03T12:12:12.123");
- assertDateFormatParsingThrowingException("strictDateHourMinuteSecondFraction", "5-6-3T12:12:12.1");
- assertDateFormatParsingThrowingException("strictDateHourMinuteSecondFraction", "5-6-3T12:12:12.12");
-
- assertValidDateFormatParsing("dateHourMinuteSecondMillis", "0005-06-03T12:12:12.123");
- assertValidDateFormatParsing("dateHourMinuteSecondMillis", "5-6-3T12:12:1.123", "0005-06-03T12:12:01.123");
- assertValidDateFormatParsing("dateHourMinuteSecondMillis", "5-6-3T12:12:1.1", "0005-06-03T12:12:01.100");
-
- assertValidDateFormatParsing("strictDateHourMinuteSecondMillis", "0005-06-03T12:12:12.123");
- assertDateFormatParsingThrowingException("strictDateHourMinuteSecondMillis", "5-6-3T12:12:12.1");
- assertDateFormatParsingThrowingException("strictDateHourMinuteSecondMillis", "5-6-3T12:12:12.12");
-
- // yyyy-MM-dd'T'HH:mm:ss.SSSZ
- assertValidDateFormatParsing("dateOptionalTime", "2014-03-03", "2014-03-03T00:00:00.000Z");
- assertValidDateFormatParsing("dateOptionalTime", "1257-3-03", "1257-03-03T00:00:00.000Z");
- assertValidDateFormatParsing("dateOptionalTime", "0005-03-3", "0005-03-03T00:00:00.000Z");
- assertValidDateFormatParsing("dateOptionalTime", "5-03-03", "0005-03-03T00:00:00.000Z");
- assertValidDateFormatParsing("dateOptionalTime", "5-03-03T1:1:1.1", "0005-03-03T01:01:01.100Z");
- assertValidDateFormatParsing("strictDateOptionalTime", "2014-03-03", "2014-03-03T00:00:00.000Z");
- assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03");
- assertDateFormatParsingThrowingException("strictDateOptionalTime", "0005-3-03");
- assertDateFormatParsingThrowingException("strictDateOptionalTime", "0005-03-3");
- assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T1:1:1.1");
- assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:01:01.1");
- assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:01:1.100");
- assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:1:01.100");
- assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T1:01:01.100");
-
- // yyyy-MM-dd’T'HH:mm:ss.SSSZZ
- assertValidDateFormatParsing("dateTime", "5-03-03T1:1:1.1Z", "0005-03-03T01:01:01.100Z");
- assertValidDateFormatParsing("strictDateTime", "2014-03-03T11:11:11.100Z", "2014-03-03T11:11:11.100Z");
- assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T1:1:1.1Z");
- assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T01:01:1.100Z");
- assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T01:1:01.100Z");
- assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T1:01:01.100Z");
-
- // yyyy-MM-dd’T'HH:mm:ssZZ
- assertValidDateFormatParsing("dateTimeNoMillis", "5-03-03T1:1:1Z", "0005-03-03T01:01:01Z");
- assertValidDateFormatParsing("strictDateTimeNoMillis", "2014-03-03T11:11:11Z", "2014-03-03T11:11:11Z");
- assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T1:1:1Z");
- assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T01:01:1Z");
- assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T01:1:01Z");
- assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T1:01:01Z");
-
- // HH
- assertValidDateFormatParsing("hour", "12");
- assertValidDateFormatParsing("hour", "1", "01");
- assertValidDateFormatParsing("strictHour", "12");
- assertValidDateFormatParsing("strictHour", "01");
- assertDateFormatParsingThrowingException("strictHour", "1");
-
- // HH:mm
- assertValidDateFormatParsing("hourMinute", "12:12");
- assertValidDateFormatParsing("hourMinute", "12:1", "12:01");
- assertValidDateFormatParsing("strictHourMinute", "12:12");
- assertValidDateFormatParsing("strictHourMinute", "12:01");
- assertDateFormatParsingThrowingException("strictHourMinute", "12:1");
-
- // HH:mm:ss
- assertValidDateFormatParsing("hourMinuteSecond", "12:12:12");
- assertValidDateFormatParsing("hourMinuteSecond", "12:12:1", "12:12:01");
- assertValidDateFormatParsing("strictHourMinuteSecond", "12:12:12");
- assertValidDateFormatParsing("strictHourMinuteSecond", "12:12:01");
- assertDateFormatParsingThrowingException("strictHourMinuteSecond", "12:12:1");
-
- // HH:mm:ss.SSS
- assertValidDateFormatParsing("hourMinuteSecondFraction", "12:12:12.123");
- assertValidDateFormatParsing("hourMinuteSecondFraction", "12:12:12.1", "12:12:12.100");
- assertValidDateFormatParsing("strictHourMinuteSecondFraction", "12:12:12.123");
- assertValidDateFormatParsing("strictHourMinuteSecondFraction", "12:12:12.1", "12:12:12.100");
-
- assertValidDateFormatParsing("hourMinuteSecondMillis", "12:12:12.123");
- assertValidDateFormatParsing("hourMinuteSecondMillis", "12:12:12.1", "12:12:12.100");
- assertValidDateFormatParsing("strictHourMinuteSecondMillis", "12:12:12.123");
- assertValidDateFormatParsing("strictHourMinuteSecondMillis", "12:12:12.1", "12:12:12.100");
-
- // yyyy-DDD
- assertValidDateFormatParsing("ordinalDate", "5-3", "0005-003");
- assertValidDateFormatParsing("strictOrdinalDate", "0005-003");
- assertDateFormatParsingThrowingException("strictOrdinalDate", "5-3");
- assertDateFormatParsingThrowingException("strictOrdinalDate", "0005-3");
- assertDateFormatParsingThrowingException("strictOrdinalDate", "5-003");
-
- // yyyy-DDD’T'HH:mm:ss.SSSZZ
- assertValidDateFormatParsing("ordinalDateTime", "5-3T12:12:12.100Z", "0005-003T12:12:12.100Z");
- assertValidDateFormatParsing("strictOrdinalDateTime", "0005-003T12:12:12.100Z");
- assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T1:12:12.123Z");
- assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T12:1:12.123Z");
- assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T12:12:1.123Z");
-
- // yyyy-DDD’T'HH:mm:ssZZ
- assertValidDateFormatParsing("ordinalDateTimeNoMillis", "5-3T12:12:12Z", "0005-003T12:12:12Z");
- assertValidDateFormatParsing("strictOrdinalDateTimeNoMillis", "0005-003T12:12:12Z");
- assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T1:12:12Z");
- assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T12:1:12Z");
- assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T12:12:1Z");
-
-
- // HH:mm:ss.SSSZZ
- assertValidDateFormatParsing("time", "12:12:12.100Z");
- assertValidDateFormatParsing("time", "01:01:01.1Z", "01:01:01.100Z");
- assertValidDateFormatParsing("time", "1:1:1.1Z", "01:01:01.100Z");
- assertValidDateFormatParsing("strictTime", "12:12:12.100Z");
- assertDateFormatParsingThrowingException("strictTime", "12:12:1.100Z");
- assertDateFormatParsingThrowingException("strictTime", "12:1:12.100Z");
- assertDateFormatParsingThrowingException("strictTime", "1:12:12.100Z");
-
- // HH:mm:ssZZ
- assertValidDateFormatParsing("timeNoMillis", "12:12:12Z");
- assertValidDateFormatParsing("timeNoMillis", "01:01:01Z", "01:01:01Z");
- assertValidDateFormatParsing("timeNoMillis", "1:1:1Z", "01:01:01Z");
- assertValidDateFormatParsing("strictTimeNoMillis", "12:12:12Z");
- assertDateFormatParsingThrowingException("strictTimeNoMillis", "12:12:1Z");
- assertDateFormatParsingThrowingException("strictTimeNoMillis", "12:1:12Z");
- assertDateFormatParsingThrowingException("strictTimeNoMillis", "1:12:12Z");
-
- // 'T’HH:mm:ss.SSSZZ
- assertValidDateFormatParsing("tTime", "T12:12:12.100Z");
- assertValidDateFormatParsing("tTime", "T01:01:01.1Z", "T01:01:01.100Z");
- assertValidDateFormatParsing("tTime", "T1:1:1.1Z", "T01:01:01.100Z");
- assertValidDateFormatParsing("strictTTime", "T12:12:12.100Z");
- assertDateFormatParsingThrowingException("strictTTime", "T12:12:1.100Z");
- assertDateFormatParsingThrowingException("strictTTime", "T12:1:12.100Z");
- assertDateFormatParsingThrowingException("strictTTime", "T1:12:12.100Z");
-
- // 'T’HH:mm:ssZZ
- assertValidDateFormatParsing("tTimeNoMillis", "T12:12:12Z");
- assertValidDateFormatParsing("tTimeNoMillis", "T01:01:01Z", "T01:01:01Z");
- assertValidDateFormatParsing("tTimeNoMillis", "T1:1:1Z", "T01:01:01Z");
- assertValidDateFormatParsing("strictTTimeNoMillis", "T12:12:12Z");
- assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T12:12:1Z");
- assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T12:1:12Z");
- assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T1:12:12Z");
-
- // xxxx-'W’ww-e
- assertValidDateFormatParsing("weekDate", "0005-W4-1", "0005-W04-1");
- assertValidDateFormatParsing("strictWeekDate", "0005-W04-1");
- assertDateFormatParsingThrowingException("strictWeekDate", "0005-W4-1");
-
- // xxxx-'W’ww-e’T'HH:mm:ss.SSSZZ
- assertValidDateFormatParsing("weekDateTime", "0005-W41-4T12:43:43.123Z");
- assertValidDateFormatParsing("weekDateTime", "5-W41-4T12:43:43.123Z", "0005-W41-4T12:43:43.123Z");
- assertValidDateFormatParsing("strictWeekDateTime", "0005-W41-4T12:43:43.123Z");
- assertValidDateFormatParsing("strictWeekDateTime", "0005-W06-4T12:43:43.123Z");
- assertDateFormatParsingThrowingException("strictWeekDateTime", "0005-W4-7T12:43:43.123Z");
- assertDateFormatParsingThrowingException("strictWeekDateTime", "5-W41-4T12:43:43.123Z");
- assertDateFormatParsingThrowingException("strictWeekDateTime", "5-W1-4T12:43:43.123Z");
-
- // xxxx-'W’ww-e’T'HH:mm:ssZZ
- assertValidDateFormatParsing("weekDateTimeNoMillis", "0005-W41-4T12:43:43Z");
- assertValidDateFormatParsing("weekDateTimeNoMillis", "5-W41-4T12:43:43Z", "0005-W41-4T12:43:43Z");
- assertValidDateFormatParsing("strictWeekDateTimeNoMillis", "0005-W41-4T12:43:43Z");
- assertValidDateFormatParsing("strictWeekDateTimeNoMillis", "0005-W06-4T12:43:43Z");
- assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "0005-W4-7T12:43:43Z");
- assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "5-W41-4T12:43:43Z");
- assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "5-W1-4T12:43:43Z");
-
- // yyyy
- assertValidDateFormatParsing("weekyear", "2014");
- assertValidDateFormatParsing("weekyear", "5", "0005");
- assertValidDateFormatParsing("weekyear", "0005");
- assertValidDateFormatParsing("strictWeekyear", "2014");
- assertValidDateFormatParsing("strictWeekyear", "0005");
- assertDateFormatParsingThrowingException("strictWeekyear", "5");
-
- // yyyy-'W'ee
- assertValidDateFormatParsing("weekyearWeek", "2014-W41");
- assertValidDateFormatParsing("weekyearWeek", "2014-W1", "2014-W01");
- assertValidDateFormatParsing("strictWeekyearWeek", "2014-W41");
- assertDateFormatParsingThrowingException("strictWeekyearWeek", "2014-W1");
-
- // weekyearWeekDay
- assertValidDateFormatParsing("weekyearWeekDay", "2014-W41-1");
- assertValidDateFormatParsing("weekyearWeekDay", "2014-W1-1", "2014-W01-1");
- assertValidDateFormatParsing("strictWeekyearWeekDay", "2014-W41-1");
- assertDateFormatParsingThrowingException("strictWeekyearWeekDay", "2014-W1-1");
-
- // yyyy
- assertValidDateFormatParsing("year", "2014");
- assertValidDateFormatParsing("year", "5", "0005");
- assertValidDateFormatParsing("strictYear", "2014");
- assertDateFormatParsingThrowingException("strictYear", "5");
-
- // yyyy-mm
- assertValidDateFormatParsing("yearMonth", "2014-12");
- assertValidDateFormatParsing("yearMonth", "2014-5", "2014-05");
- assertValidDateFormatParsing("strictYearMonth", "2014-12");
- assertDateFormatParsingThrowingException("strictYearMonth", "2014-5");
-
- // yyyy-mm-dd
- assertValidDateFormatParsing("yearMonthDay", "2014-12-12");
- assertValidDateFormatParsing("yearMonthDay", "2014-05-5", "2014-05-05");
- assertValidDateFormatParsing("strictYearMonthDay", "2014-12-12");
- assertDateFormatParsingThrowingException("strictYearMonthDay", "2014-05-5");
- }
-
- public void testThatRootObjectParsingIsStrict() throws Exception {
- String[] datesThatWork = new String[] { "2014/10/10", "2014/10/10 12:12:12", "2014-05-05", "2014-05-05T12:12:12.123Z" };
- String[] datesThatShouldNotWork = new String[]{ "5-05-05", "2014-5-05", "2014-05-5",
- "2014-05-05T1:12:12.123Z", "2014-05-05T12:1:12.123Z", "2014-05-05T12:12:1.123Z",
- "4/10/10", "2014/1/10", "2014/10/1",
- "2014/10/10 1:12:12", "2014/10/10 12:1:12", "2014/10/10 12:12:1"
- };
-
- // good case
- for (String date : datesThatWork) {
- boolean dateParsingSuccessful = false;
- for (DateFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
- try {
- dateTimeFormatter.parseMillis(date);
- dateParsingSuccessful = true;
- break;
- } catch (Exception e) {}
- }
- if (!dateParsingSuccessful) {
- fail("Parsing for date " + date + " in root object mapper failed, but shouldnt");
- }
- }
-
- // bad case
- for (String date : datesThatShouldNotWork) {
- for (DateFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
- try {
- dateTimeFormatter.parseMillis(date);
- fail(String.format(Locale.ROOT, "Expected exception when parsing date %s in root mapper", date));
- } catch (Exception e) {}
- }
- }
- }
-
- public void testDeprecatedFormatSpecifiers() {
- Joda.forPattern("CC");
- assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" +
- " next major version of Elasticsearch.");
- Joda.forPattern("YYYY");
- assertWarnings("Use of 'Y' (year-of-era) will change to 'y' in the" +
- " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier.");
- Joda.forPattern("xxxx");
- assertWarnings("Use of 'x' (week-based-year) will change" +
- " to 'Y' in the next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier.");
- // multiple deprecations
- Joda.forPattern("CC-YYYY");
- assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" +
- " next major version of Elasticsearch.", "Use of 'Y' (year-of-era) will change to 'y' in the" +
- " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier.");
- }
-
- public void testDeprecatedEpochScientificNotation() {
- assertValidDateFormatParsing("epoch_second", "1.234e5", "123400");
- assertWarnings("Use of scientific notation" +
- " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
- assertValidDateFormatParsing("epoch_millis", "1.234e5", "123400");
- assertWarnings("Use of scientific notation" +
- " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
- }
-
- public void testDeprecatedEpochNegative() {
- assertValidDateFormatParsing("epoch_second", "-12345", "-12345");
- assertWarnings("Use of negative values" +
- " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
- assertValidDateFormatParsing("epoch_millis", "-12345", "-12345");
- assertWarnings("Use of negative values" +
- " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
- }
-
- private void assertValidDateFormatParsing(String pattern, String dateToParse) {
- assertValidDateFormatParsing(pattern, dateToParse, dateToParse);
- }
-
- private void assertValidDateFormatParsing(String pattern, String dateToParse, String expectedDate) {
- DateFormatter formatter = DateFormatter.forPattern(pattern);
- assertThat(formatter.formatMillis(formatter.parseMillis(dateToParse)), is(expectedDate));
- }
-
- private void assertDateFormatParsingThrowingException(String pattern, String invalidDate) {
- try {
- DateFormatter formatter = DateFormatter.forPattern(pattern);
- formatter.parseMillis(invalidDate);
- fail(String.format(Locale.ROOT, "Expected parsing exception for pattern [%s] with date [%s], but did not happen",
- pattern, invalidDate));
- } catch (IllegalArgumentException e) {
- }
- }
-
- private long utcTimeInMillis(String time) {
- return ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC).parseMillis(time);
- }
-
-}
diff --git a/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java b/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java
index 7e3dbdd5b94df..3ee4ce0e7d7bf 100644
--- a/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java
+++ b/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java
@@ -19,9 +19,11 @@
package org.elasticsearch.common.rounding;
+import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;
import java.time.ZoneOffset;
@@ -42,6 +44,7 @@ public void testSerialization() throws Exception {
rounding = org.elasticsearch.common.Rounding.builder(timeValue()).timeZone(ZoneOffset.UTC).build();
}
BytesStreamOutput output = new BytesStreamOutput();
+ output.setVersion(VersionUtils.getPreviousVersion(Version.V_7_0_0));
rounding.writeTo(output);
Rounding roundingJoda = Rounding.Streams.read(output.bytes().streamInput());
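
The added setVersion call pins the stream to the wire version just before 7.0.0, presumably so the duel still exercises the pre-7.0 serialization path now that the java-time Rounding is the default. A minimal sketch of the round trip the test performs, assuming the Elasticsearch server and test classes from this branch (the one-hour interval is an illustrative choice):

import java.time.ZoneOffset;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.VersionUtils;

public class RoundingDuelSketch {
    public static void main(String[] args) throws Exception {
        // build a java-time based Rounding, as the test does
        org.elasticsearch.common.Rounding rounding = org.elasticsearch.common.Rounding
            .builder(TimeValue.timeValueHours(1)).timeZone(ZoneOffset.UTC).build();

        BytesStreamOutput output = new BytesStreamOutput();
        // pin the stream to a pre-7.0 wire version so the legacy joda-based Rounding can read it back
        output.setVersion(VersionUtils.getPreviousVersion(Version.V_7_0_0));
        rounding.writeTo(output);

        Rounding legacyRounding = Rounding.Streams.read(output.bytes().streamInput());
        System.out.println(legacyRounding);
    }
}
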
diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java
index feb406c61c966..96ef39e430178 100644
--- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java
+++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java
@@ -25,7 +25,6 @@
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
@@ -57,13 +56,13 @@ public void testEpochMillisParser() {
}
}
- public void testEpochMilliParser() {
+ public void testInvalidEpochMilliParser() {
DateFormatter formatter = DateFormatters.forPattern("epoch_millis");
- DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("invalid"));
- assertThat(e.getMessage(), containsString("could not be parsed"));
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("invalid"));
+ assertThat(e.getMessage(), containsString("failed to parse date field [invalid] with format [epoch_millis]"));
- e = expectThrows(DateTimeParseException.class, () -> formatter.parse("123.1234567"));
- assertThat(e.getMessage(), containsString("unparsed text found at index 3"));
+ e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("123.1234567"));
+ assertThat(e.getMessage(), containsString("failed to parse date field [123.1234567] with format [epoch_millis]"));
}
// this is not in the duelling tests, because the epoch second parser in joda time drops the milliseconds after the comma
@@ -72,14 +71,14 @@ public void testEpochMilliParser() {
public void testEpochSecondParser() {
DateFormatter formatter = DateFormatters.forPattern("epoch_second");
- DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.1"));
- assertThat(e.getMessage(), is("Text '1234.1' could not be parsed, unparsed text found at index 4"));
- e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234."));
- assertThat(e.getMessage(), is("Text '1234.' could not be parsed, unparsed text found at index 4"));
- e = expectThrows(DateTimeParseException.class, () -> formatter.parse("abc"));
- assertThat(e.getMessage(), is("Text 'abc' could not be parsed, unparsed text found at index 0"));
- e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.abc"));
- assertThat(e.getMessage(), is("Text '1234.abc' could not be parsed, unparsed text found at index 4"));
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.1234567890"));
+ assertThat(e.getMessage(), is("failed to parse date field [1234.1234567890] with format [epoch_second]"));
+ e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.123456789013221"));
+ assertThat(e.getMessage(), containsString("[1234.123456789013221]"));
+ e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("abc"));
+ assertThat(e.getMessage(), containsString("[abc]"));
+ e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.abc"));
+ assertThat(e.getMessage(), containsString("[1234.abc]"));
}
public void testEpochMilliParsersWithDifferentFormatters() {
@@ -139,7 +138,7 @@ public void testEqualsAndHashcode() {
assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis")));
}
- public void testForceJava8() {
+ public void testSupportBackwardsJava8Format() {
assertThat(DateFormatter.forPattern("8yyyy-MM-dd"), instanceOf(JavaDateFormatter.class));
// named formats too
assertThat(DateFormatter.forPattern("8date_optional_time"), instanceOf(JavaDateFormatter.class));
@@ -161,27 +160,29 @@ public void testParsingStrictNanoDates() {
}
public void testRoundupFormatterWithEpochDates() {
- assertRoundupFormatter("8epoch_millis", "1234567890", 1234567890L);
+ assertRoundupFormatter("epoch_millis", "1234567890", 1234567890L);
// also check nanos of the epoch_millis formatter if it is rounded up to the nano second
DateTimeFormatter roundUpFormatter = ((JavaDateFormatter) DateFormatter.forPattern("8epoch_millis")).getRoundupParser();
Instant epochMilliInstant = DateFormatters.toZonedDateTime(roundUpFormatter.parse("1234567890")).toInstant();
assertThat(epochMilliInstant.getLong(ChronoField.NANO_OF_SECOND), is(890_999_999L));
- assertRoundupFormatter("8strict_date_optional_time||epoch_millis", "2018-10-10T12:13:14.123Z", 1539173594123L);
- assertRoundupFormatter("8strict_date_optional_time||epoch_millis", "1234567890", 1234567890L);
- assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "2018-10-10T12:13:14.123", 1539173594123L);
- assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "1234567890", 1234567890L);
+ assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2018-10-10T12:13:14.123Z", 1539173594123L);
+ assertRoundupFormatter("strict_date_optional_time||epoch_millis", "1234567890", 1234567890L);
+ assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2018-10-10", 1539215999999L);
+ assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "2018-10-10T12:13:14.123", 1539173594123L);
+ assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "1234567890", 1234567890L);
- assertRoundupFormatter("8epoch_second", "1234567890", 1234567890999L);
+ assertRoundupFormatter("epoch_second", "1234567890", 1234567890999L);
// also check nanos of the epoch_millis formatter if it is rounded up to the nano second
DateTimeFormatter epochSecondRoundupParser = ((JavaDateFormatter) DateFormatter.forPattern("8epoch_second")).getRoundupParser();
Instant epochSecondInstant = DateFormatters.toZonedDateTime(epochSecondRoundupParser.parse("1234567890")).toInstant();
assertThat(epochSecondInstant.getLong(ChronoField.NANO_OF_SECOND), is(999_999_999L));
- assertRoundupFormatter("8strict_date_optional_time||epoch_second", "2018-10-10T12:13:14.123Z", 1539173594123L);
- assertRoundupFormatter("8strict_date_optional_time||epoch_second", "1234567890", 1234567890999L);
- assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "2018-10-10T12:13:14.123", 1539173594123L);
- assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "1234567890", 1234567890999L);
+ assertRoundupFormatter("strict_date_optional_time||epoch_second", "2018-10-10T12:13:14.123Z", 1539173594123L);
+ assertRoundupFormatter("strict_date_optional_time||epoch_second", "1234567890", 1234567890999L);
+ assertRoundupFormatter("strict_date_optional_time||epoch_second", "2018-10-10", 1539215999999L);
+ assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "2018-10-10T12:13:14.123", 1539173594123L);
+ assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "1234567890", 1234567890999L);
}
private void assertRoundupFormatter(String format, String input, long expectedMilliSeconds) {
@@ -194,8 +195,8 @@ private void assertRoundupFormatter(String format, String input, long expectedMi
public void testRoundupFormatterZone() {
ZoneId zoneId = randomZone();
- String format = randomFrom("8epoch_second", "8epoch_millis", "8strict_date_optional_time", "8uuuu-MM-dd'T'HH:mm:ss.SSS",
- "8strict_date_optional_time||date_optional_time");
+ String format = randomFrom("epoch_second", "epoch_millis", "strict_date_optional_time", "uuuu-MM-dd'T'HH:mm:ss.SSS",
+ "strict_date_optional_time||date_optional_time");
JavaDateFormatter formatter = (JavaDateFormatter) DateFormatter.forPattern(format).withZone(zoneId);
DateTimeFormatter roundUpFormatter = formatter.getRoundupParser();
assertThat(roundUpFormatter.getZone(), is(zoneId));
@@ -204,8 +205,8 @@ public void testRoundupFormatterZone() {
public void testRoundupFormatterLocale() {
Locale locale = randomLocale(random());
- String format = randomFrom("8epoch_second", "8epoch_millis", "8strict_date_optional_time", "8uuuu-MM-dd'T'HH:mm:ss.SSS",
- "8strict_date_optional_time||date_optional_time");
+ String format = randomFrom("epoch_second", "epoch_millis", "strict_date_optional_time", "uuuu-MM-dd'T'HH:mm:ss.SSS",
+ "strict_date_optional_time||date_optional_time");
JavaDateFormatter formatter = (JavaDateFormatter) DateFormatter.forPattern(format).withLocale(locale);
DateTimeFormatter roundupParser = formatter.getRoundupParser();
assertThat(roundupParser.getLocale(), is(locale));
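
Two changes stand out in this file: the 8 prefix is dropped from the round-up assertions, suggesting these named formats now resolve to the java-time implementation by default (the prefix remains supported for backwards compatibility, per the renamed testSupportBackwardsJava8Format), and the new 2018-10-10 cases show the round-up parser filling missing fields with their maximum values, so a bare date resolves to the last millisecond of the day. A minimal sketch of that behaviour through the public date-math API, assuming the DateFormatter and DateMathParser classes from this branch:

import java.time.ZoneId;

import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;

public class RoundUpSketch {
    public static void main(String[] args) {
        DateMathParser parser =
            DateFormatter.forPattern("strict_date_optional_time||epoch_millis").toDateMathParser();
        // roundUp = false resolves missing fields to their minimum, roundUp = true to their maximum
        long startOfDay = parser.parse("2018-10-10", () -> 0L, false, (ZoneId) null).toEpochMilli();
        long endOfDay = parser.parse("2018-10-10", () -> 0L, true, (ZoneId) null).toEpochMilli();
        System.out.println(startOfDay); // 1539129600000
        System.out.println(endOfDay);   // 1539215999999, matching the assertion above
    }
}
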
diff --git a/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java
index 8d702ebee8388..2b8d89bc68bae 100644
--- a/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java
@@ -39,8 +39,6 @@ public class JavaDateMathParserTests extends ESTestCase {
private final DateMathParser parser = formatter.toDateMathParser();
public void testBasicDates() {
- assertDateMathEquals("2014", "2014-01-01T00:00:00.000");
- assertDateMathEquals("2014-05", "2014-05-01T00:00:00.000");
assertDateMathEquals("2014-05-30", "2014-05-30T00:00:00.000");
assertDateMathEquals("2014-05-30T20", "2014-05-30T20:00:00.000");
assertDateMathEquals("2014-05-30T20:21", "2014-05-30T20:21:00.000");
@@ -125,7 +123,7 @@ public void testMultipleAdjustments() {
}
public void testNow() {
- final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null);
+ final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null).toEpochMilli();
assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null);
assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null);
@@ -142,11 +140,11 @@ public void testRoundingPreservesEpochAsBaseDate() {
DateMathParser parser = formatter.toDateMathParser();
ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(formatter.parse("04:52:20"));
assertThat(zonedDateTime.getYear(), is(1970));
- long millisStart = zonedDateTime.toInstant().toEpochMilli();
+ Instant millisStart = zonedDateTime.toInstant();
assertEquals(millisStart, parser.parse("04:52:20", () -> 0, false, (ZoneId) null));
// due to rounding up, we have to add the number of milliseconds here manually
long millisEnd = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")).toInstant().toEpochMilli() + 999;
- assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null));
+ assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null).toEpochMilli());
}
// Implicit rounding happening when parts of the date are not specified
@@ -166,9 +164,10 @@ public void testImplicitRounding() {
// implicit rounding with explicit timezone in the date format
DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-ddXXX");
DateMathParser parser = formatter.toDateMathParser();
- long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
+ Instant time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time);
- time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null);
+ time = DateFormatter.forPattern("strict_date_optional_time_nanos").toDateMathParser()
+ .parse("2011-10-09T23:59:59.999+01:00", () -> 0, false, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time);
}
@@ -176,7 +175,6 @@ public void testImplicitRounding() {
public void testExplicitRounding() {
assertDateMathEquals("2014-11-18||/y", "2014-01-01", 0, false, null);
assertDateMathEquals("2014-11-18||/y", "2014-12-31T23:59:59.999", 0, true, null);
- assertDateMathEquals("2014||/y", "2014-01-01", 0, false, null);
assertDateMathEquals("2014-01-01T00:00:00.001||/y", "2014-12-31T23:59:59.999", 0, true, null);
// rounding should also take into account time zone
assertDateMathEquals("2014-11-18||/y", "2013-12-31T23:00:00.000Z", 0, false, ZoneId.of("CET"));
@@ -239,16 +237,16 @@ public void testTimestamps() {
assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000");
// also check other time units
- DateMathParser parser = DateFormatter.forPattern("8epoch_second||dateOptionalTime").toDateMathParser();
- long datetime = parser.parse("1418248078", () -> 0);
+ DateMathParser parser = DateFormatter.forPattern("epoch_second||dateOptionalTime").toDateMathParser();
+ long datetime = parser.parse("1418248078", () -> 0).toEpochMilli();
assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");
// a timestamp before 10000 is a year
- assertDateMathEquals("9999", "9999-01-01T00:00:00.000");
+ assertDateMathEquals("9999", "1970-01-01T00:00:09.999Z");
// 10000 is also a year, breaking bwc, used to be a timestamp
- assertDateMathEquals("10000", "10000-01-01T00:00:00.000");
+ assertDateMathEquals("10000", "1970-01-01T00:00:10.000Z");
// but 10000 with T is still a date format
- assertDateMathEquals("10000T", "10000-01-01T00:00:00.000");
+ assertDateMathEquals("10000-01-01T", "10000-01-01T00:00:00.000");
}
void assertParseException(String msg, String date, String exc) {
@@ -266,7 +264,7 @@ public void testIllegalMathFormat() {
public void testIllegalDateFormat() {
assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "failed to parse date field");
- assertParseException("Expected bad date format exception", "123bogus", "Unrecognized chars at the end of [123bogus]");
+ assertParseException("Expected bad date format exception", "123bogus", "failed to parse date field [123bogus]");
}
public void testOnlyCallsNowIfNecessary() {
@@ -286,12 +284,12 @@ private void assertDateMathEquals(String toTest, String expected) {
}
private void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, ZoneId timeZone) {
- long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone);
+ long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone).toEpochMilli();
assertDateEquals(gotMillis, toTest, expected);
}
private void assertDateEquals(long gotMillis, String original, String expected) {
- long expectedMillis = parser.parse(expected, () -> 0);
+ long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli();
if (gotMillis != expectedMillis) {
ZonedDateTime zonedDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(gotMillis), ZoneOffset.UTC);
fail("Date math not equal\n" +
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
index 8b437d25a8495..38b3d5a2f1ff2 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java
@@ -21,21 +21,23 @@
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
import org.junit.Before;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Collection;
import static org.hamcrest.Matchers.containsString;
@@ -173,7 +175,8 @@ public void testIgnoreMalformed() throws Exception {
.endObject()),
XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
- assertThat(e.getCause().getMessage(), containsString("Cannot parse \"2016-03-99\""));
+ assertThat(e.getCause().getMessage(),
+ containsString("failed to parse date field [2016-03-99] with format [strict_date_optional_time||epoch_millis]"));
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "date")
@@ -217,36 +220,13 @@ public void testChangeFormat() throws IOException {
assertEquals(1457654400000L, pointField.numericValue().longValue());
}
- public void testFloatEpochFormat() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field").field("type", "date")
- .field("format", "epoch_millis").endObject().endObject()
- .endObject().endObject());
-
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
-
- assertEquals(mapping, mapper.mappingSource().toString());
-
- long epochMillis = randomNonNegativeLong();
- String epochFloatValue = epochMillis + "." + randomIntBetween(0, 999);
-
- ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field", epochFloatValue)
- .endObject()),
- XContentType.JSON));
-
- IndexableField[] fields = doc.rootDoc().getFields("field");
- assertEquals(2, fields.length);
- IndexableField pointField = fields[0];
- assertEquals(epochMillis, pointField.numericValue().longValue());
- }
-
public void testChangeLocale() throws IOException {
+ assumeTrue("need java 9 for testing ",JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0);
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field").field("type", "date").field("locale", "fr").endObject().endObject()
- .endObject().endObject());
+ .startObject("properties").startObject("field").field("type", "date")
+ .field("format", "E, d MMM yyyy HH:mm:ss Z")
+ .field("locale", "de")
+ .endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
@@ -255,7 +235,7 @@ public void testChangeLocale() throws IOException {
mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
- .field("field", 1457654400)
+ .field("field", "Mi., 06 Dez. 2000 02:55:00 -0800")
.endObject()),
XContentType.JSON));
}
@@ -340,12 +320,8 @@ public void testEmptyName() throws IOException {
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
- /**
- * Test that time zones are correctly parsed by the {@link DateFieldMapper}.
- * There is a known bug with Joda 2.9.4 reported in https://github.com/JodaOrg/joda-time/issues/373.
- */
public void testTimeZoneParsing() throws Exception {
- final String timeZonePattern = "yyyy-MM-dd" + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'");
+ final String timeZonePattern = "yyyy-MM-dd" + randomFrom("XXX", "[XXX]", "'['XXX']'");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
@@ -360,20 +336,22 @@ public void testTimeZoneParsing() throws Exception {
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
- final DateTimeZone randomTimeZone = randomBoolean() ? DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone();
- final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone);
+ DateFormatter formatter = DateFormatter.forPattern(timeZonePattern);
+ final ZoneId randomTimeZone = randomBoolean() ? ZoneId.of(randomFrom("UTC", "CET")) : randomZone();
+ final ZonedDateTime randomDate = ZonedDateTime.of(2016, 3, 11, 0, 0, 0, 0, randomTimeZone);
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
- .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate))
+ .field("field", formatter.format(randomDate))
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
- assertEquals(randomDate.withZone(DateTimeZone.UTC).getMillis(), fields[0].numericValue().longValue());
+ long millis = randomDate.withZoneSameInstant(ZoneOffset.UTC).toInstant().toEpochMilli();
+ assertEquals(millis, fields[0].numericValue().longValue());
}
public void testMergeDate() throws IOException {
@@ -429,6 +407,6 @@ public void testIllegalFormatField() throws Exception {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
- assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
+ assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
}
}
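The testTimeZoneParsing change above swaps the Joda pattern letters "ZZZ" for "XXX". A hedged plain-JDK sketch of what "XXX" produces in a java.time pattern; the class name and the CET example are illustrative:

    import java.time.ZoneId;
    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;

    public class OffsetPatternLetters {
        public static void main(String[] args) {
            // 'XXX' prints an ISO-8601 offset such as +01:00 (or 'Z' for UTC).
            DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-ddXXX");
            ZonedDateTime date = ZonedDateTime.of(2016, 3, 11, 0, 0, 0, 0, ZoneId.of("CET"));
            System.out.println(formatter.format(date)); // 2016-03-11+01:00
        }
    }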
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
index 072170aff09dd..d4058d50f74a2 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
@@ -33,6 +33,7 @@
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.IndexSettings;
@@ -45,6 +46,7 @@
import org.junit.Before;
import java.io.IOException;
+import java.time.ZoneOffset;
import java.util.Locale;
public class DateFieldTypeTests extends FieldTypeTestCase {
@@ -67,7 +69,7 @@ public void modify(MappedFieldType ft) {
addModifier(new Modifier("locale", false) {
@Override
public void modify(MappedFieldType ft) {
- ((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("date_optional_time").withLocale(Locale.CANADA));
+ ((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("strict_date_optional_time").withLocale(Locale.CANADA));
}
});
nowInMillis = randomNonNegativeLong();
@@ -110,8 +112,10 @@ private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
public void testIsFieldWithinQuery() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
- long instant1 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-12").getMillis();
- long instant2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2016-04-03").getMillis();
+ long instant1 =
+ DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12")).toInstant().toEpochMilli();
+ long instant2 =
+ DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2016-04-03")).toInstant().toEpochMilli();
Document doc = new Document();
LongPoint field = new LongPoint("my_date", instant1);
doc.add(field);
@@ -138,25 +142,27 @@ public void testIsFieldWithinQuery() throws IOException {
public void testValueFormat() {
MappedFieldType ft = createDefaultFieldType();
- long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-12T14:10:55").getMillis();
+ long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12T14:10:55"))
+ .toInstant().toEpochMilli();
+
assertEquals("2015-10-12T14:10:55.000Z",
- ft.docValueFormat(null, DateTimeZone.UTC).format(instant));
+ ft.docValueFormat(null, ZoneOffset.UTC).format(instant));
assertEquals("2015-10-12T15:10:55.000+01:00",
- ft.docValueFormat(null, DateTimeZone.forOffsetHours(1)).format(instant));
+ ft.docValueFormat(null, ZoneOffset.ofHours(1)).format(instant));
assertEquals("2015",
- createDefaultFieldType().docValueFormat("yyyy", DateTimeZone.UTC).format(instant));
+ createDefaultFieldType().docValueFormat("YYYY", ZoneOffset.UTC).format(instant));
assertEquals(instant,
- ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null));
+ ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", false, null));
assertEquals(instant + 999,
- ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", true, null));
- assertEquals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-13").getMillis() - 1,
- ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12||/d", true, null));
+ ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", true, null));
+ long i = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-13")).toInstant().toEpochMilli();
+ assertEquals(i - 1, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12||/d", true, null));
}
public void testValueForSearch() {
MappedFieldType ft = createDefaultFieldType();
String date = "2015-10-12T12:09:55.000Z";
- long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date).getMillis();
+ long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date);
assertEquals(date, ft.valueForDisplay(instant));
}
@@ -170,7 +176,7 @@ public void testTermQuery() {
MappedFieldType ft = createDefaultFieldType();
ft.setName("field");
String date = "2015-10-12T14:10:55";
- long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date).getMillis();
+ long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli();
ft.setIndexOptions(IndexOptions.DOCS);
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant, instant + 999),
@@ -193,8 +199,9 @@ public void testRangeQuery() throws IOException {
ft.setName("field");
String date1 = "2015-10-12T14:10:55";
String date2 = "2016-04-28T11:33:52";
- long instant1 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date1).getMillis();
- long instant2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date2).getMillis() + 999;
+ long instant1 = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli();
+ long instant2 =
+ DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli() + 999;
ft.setIndexOptions(IndexOptions.DOCS);
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index 56e6f5e4c6b04..b3539d9994334 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -42,6 +42,7 @@
import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
+import java.time.Instant;
import java.util.Collection;
import java.util.Collections;
@@ -455,7 +456,7 @@ public void testReuseExistingMappings() throws IOException, Exception {
.field("my_field3", 44)
.field("my_field4", 45)
.field("my_field5", 46)
- .field("my_field6", 47)
+ .field("my_field6", Instant.now().toEpochMilli())
.field("my_field7", true)
.endObject());
Mapper myField1Mapper = null;
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
index ff44dec81d962..0b066fbd7162d 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java
@@ -60,8 +60,6 @@ public void testMatchTypeOnly() throws Exception {
assertThat(mapperService.fullName("l"), notNullValue());
assertNotSame(IndexOptions.NONE, mapperService.fullName("l").indexOptions());
-
-
}
public void testSimple() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
index fcb78f66add5e..65dcd396ed740 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java
@@ -458,7 +458,7 @@ public void testIllegalFormatField() throws Exception {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
- assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
+ assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
}
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java
index 34e7081d51d5d..699f85f1b12b1 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java
@@ -104,11 +104,12 @@ public void testDateRangeQuery() throws Exception {
DateMathParser parser = type.dateMathParser;
Query query = new QueryStringQueryBuilder(DATE_RANGE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery(createShardContext());
Query range = LongRange.newIntersectsQuery(DATE_RANGE_FIELD_NAME,
- new long[]{ parser.parse("2010-01-01", () -> 0)}, new long[]{ parser.parse("2018-01-01", () -> 0)});
+ new long[]{ parser.parse("2010-01-01", () -> 0).toEpochMilli()},
+ new long[]{ parser.parse("2018-01-01", () -> 0).toEpochMilli()});
Query dv = RangeFieldMapper.RangeType.DATE.dvRangeQuery(DATE_RANGE_FIELD_NAME,
BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
- parser.parse("2010-01-01", () -> 0),
- parser.parse("2018-01-01", () -> 0), true, true);
+ parser.parse("2010-01-01", () -> 0).toEpochMilli(),
+ parser.parse("2018-01-01", () -> 0).toEpochMilli(), true, true);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java
index f04a193ef96b2..6ca98fb4db6d2 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java
@@ -48,6 +48,8 @@
import java.net.InetAddress;
import java.util.Locale;
+import static org.hamcrest.Matchers.containsString;
+
public class RangeFieldTypeTests extends FieldTypeTestCase {
RangeType type;
protected static String FIELDNAME = "field";
@@ -111,17 +113,18 @@ public void testDateRangeQueryUsingMappingFormat() {
fieldType.setHasDocValues(false);
ShapeRelation relation = randomFrom(ShapeRelation.values());
- // dates will break the default format
+ // dates will break the default format because month and day of month are swapped in this format
final String from = "2016-15-06T15:29:50+08:00";
final String to = "2016-16-06T15:29:50+08:00";
ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> fieldType.rangeQuery(from, to, true, true, relation, null, null, context));
- assertEquals("failed to parse date field [2016-15-06T15:29:50+08:00] with format [strict_date_optional_time||epoch_millis]",
- ex.getMessage());
+ assertThat(ex.getMessage(),
+ containsString("failed to parse date field [2016-15-06T15:29:50+08:00] with format [strict_date_optional_time||epoch_millis]")
+ );
// setting mapping format which is compatible with those dates
- final DateFormatter formatter = DateFormatter.forPattern("yyyy-dd-MM'T'HH:mm:ssZZ");
+ final DateFormatter formatter = DateFormatter.forPattern("yyyy-dd-MM'T'HH:mm:ssZZZZZ");
assertEquals(1465975790000L, formatter.parseMillis(from));
assertEquals(1466062190000L, formatter.parseMillis(to));
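The mapping format in RangeFieldTypeTests changes from the Joda suffix "ZZ" to the java.time suffix "ZZZZZ". A plain-JDK sketch parsing the same "month and day swapped" input with that suffix; the class name is illustrative, and the expected millis value is the one asserted above:

    import java.time.OffsetDateTime;
    import java.time.format.DateTimeFormatter;

    public class SwappedMonthDayFormat {
        public static void main(String[] args) {
            // 'ZZZZZ' parses an extended ISO-8601 offset such as +08:00.
            DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-dd-MM'T'HH:mm:ssZZZZZ");
            OffsetDateTime from = OffsetDateTime.parse("2016-15-06T15:29:50+08:00", formatter);
            System.out.println(from.toInstant().toEpochMilli()); // 1465975790000
        }
    }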
diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index 0eb6de7da252f..6f72277007dd5 100644
--- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -64,9 +64,10 @@
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.DateTimeException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -177,7 +178,7 @@ protected QueryStringQueryBuilder doCreateTestQueryBuilder() {
queryStringQueryBuilder.minimumShouldMatch(randomMinimumShouldMatch());
}
if (randomBoolean()) {
- queryStringQueryBuilder.timeZone(randomDateTimeZone().getID());
+ queryStringQueryBuilder.timeZone(randomZone().getId());
}
if (randomBoolean()) {
queryStringQueryBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean());
@@ -211,7 +212,7 @@ public QueryStringQueryBuilder mutateInstance(QueryStringQueryBuilder instance)
String quoteFieldSuffix = instance.quoteFieldSuffix();
Float tieBreaker = instance.tieBreaker();
String minimumShouldMatch = instance.minimumShouldMatch();
- String timeZone = instance.timeZone() == null ? null : instance.timeZone().getID();
+ String timeZone = instance.timeZone() == null ? null : instance.timeZone().getId();
boolean autoGenerateSynonymsPhraseQuery = instance.autoGenerateSynonymsPhraseQuery();
boolean fuzzyTranspositions = instance.fuzzyTranspositions();
@@ -319,12 +320,12 @@ public QueryStringQueryBuilder mutateInstance(QueryStringQueryBuilder instance)
break;
case 20:
if (timeZone == null) {
- timeZone = randomDateTimeZone().getID();
+ timeZone = randomZone().getId();
} else {
if (randomBoolean()) {
timeZone = null;
} else {
- timeZone = randomValueOtherThan(timeZone, () -> randomDateTimeZone().getID());
+ timeZone = randomValueOtherThan(timeZone, () -> randomZone().getId());
}
}
break;
@@ -848,7 +849,7 @@ public void testTimezone() throws Exception {
QueryBuilder queryBuilder = parseQuery(queryAsString);
assertThat(queryBuilder, instanceOf(QueryStringQueryBuilder.class));
QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder;
- assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris")));
+ assertThat(queryStringQueryBuilder.timeZone(), equalTo(ZoneId.of("Europe/Paris")));
String invalidQueryAsString = "{\n" +
" \"query_string\":{\n" +
@@ -856,7 +857,7 @@ public void testTimezone() throws Exception {
" \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
" }\n" +
"}";
- expectThrows(IllegalArgumentException.class, () -> parseQuery(invalidQueryAsString));
+ expectThrows(DateTimeException.class, () -> parseQuery(invalidQueryAsString));
}
public void testToQueryBooleanQueryMultipleBoosts() throws Exception {
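With the query builder time zone now held as a java.time ZoneId, an unknown zone in the request fails with a DateTimeException rather than an IllegalArgumentException, as the updated expectThrows shows. A small JDK-only sketch of that behaviour; the class name and the bogus ID are made up for illustration:

    import java.time.DateTimeException;
    import java.time.ZoneId;

    public class ZoneIdLookup {
        public static void main(String[] args) {
            System.out.println(ZoneId.of("Europe/Paris")); // valid region ID
            try {
                ZoneId.of("not-a-real-zone"); // unknown region IDs raise a DateTimeException
            } catch (DateTimeException e) {
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }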
diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
index df312ba84c309..52f2c89d645f9 100644
--- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java
@@ -44,10 +44,12 @@
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.Map;
@@ -72,18 +74,22 @@ protected RangeQueryBuilder doCreateTestQueryBuilder() {
break;
case 1:
// use mapped date field, using date string representation
+ Instant now = Instant.now();
+ ZonedDateTime start = now.minusMillis(randomIntBetween(0, 1000000)).atZone(ZoneOffset.UTC);
+ ZonedDateTime end = now.plusMillis(randomIntBetween(0, 1000000)).atZone(ZoneOffset.UTC);
query = new RangeQueryBuilder(randomFrom(
DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, DATE_ALIAS_FIELD_NAME));
- query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
- query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
+ query.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(start));
+ query.to(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(end));
// Create timestamp option only then we have a date mapper,
// otherwise we could trigger exception.
if (createShardContext().getMapperService().fullName(DATE_FIELD_NAME) != null) {
if (randomBoolean()) {
- query.timeZone(randomDateTimeZone().getID());
+ query.timeZone(randomZone().getId());
}
if (randomBoolean()) {
- query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ");
+ String format = "strict_date_optional_time";
+ query.format(format);
}
}
break;
@@ -444,7 +450,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC
DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
query.from(queryFromValue);
query.to(queryToValue);
- query.timeZone(randomDateTimeZone().getID());
+ query.timeZone(randomZone().getId());
query.format("yyyy-MM-dd");
QueryShardContext queryShardContext = createShardContext();
QueryBuilder rewritten = query.rewrite(queryShardContext);
diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
index 6d1db852a85a4..7e13b38fd3d25 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
@@ -26,6 +26,7 @@
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.cache.request.RequestCacheStats;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@@ -34,8 +35,8 @@
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.test.junit.annotations.TestLogging;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
@@ -68,7 +69,7 @@ public void testCacheAggs() throws Exception {
// which used to not work well with the query cache because of the handles stream output
// see #9500
final SearchResponse r1 = client.prepareSearch("index").setSize(0).setSearchType(SearchType.QUERY_THEN_FETCH)
- .addAggregation(dateHistogram("histo").field("f").timeZone(DateTimeZone.forID("+01:00")).minDocCount(0)
+ .addAggregation(dateHistogram("histo").field("f").timeZone(ZoneId.of("+01:00")).minDocCount(0)
.dateHistogramInterval(DateHistogramInterval.MONTH))
.get();
assertSearchResponse(r1);
@@ -80,7 +81,7 @@ public void testCacheAggs() throws Exception {
for (int i = 0; i < 10; ++i) {
final SearchResponse r2 = client.prepareSearch("index").setSize(0)
.setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(dateHistogram("histo").field("f")
- .timeZone(DateTimeZone.forID("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
+ .timeZone(ZoneId.of("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
.get();
assertSearchResponse(r2);
Histogram h1 = r1.getAggregations().get("histo");
@@ -246,15 +247,16 @@ public void testQueryRewriteDatesWithNow() throws Exception {
assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
.setSettings(settings).get());
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
- indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now),
- client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
- client.prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)),
- client.prepareIndex("index-2", "type", "4").setSource("d", now.minusDays(3)),
- client.prepareIndex("index-2", "type", "5").setSource("d", now.minusDays(4)),
- client.prepareIndex("index-2", "type", "6").setSource("d", now.minusDays(5)),
- client.prepareIndex("index-3", "type", "7").setSource("d", now.minusDays(6)),
- client.prepareIndex("index-3", "type", "8").setSource("d", now.minusDays(7)),
- client.prepareIndex("index-3", "type", "9").setSource("d", now.minusDays(8)));
+ DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time");
+ indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now)),
+ client.prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1))),
+ client.prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2))),
+ client.prepareIndex("index-2", "type", "4").setSource("d", formatter.format(now.minusDays(3))),
+ client.prepareIndex("index-2", "type", "5").setSource("d", formatter.format(now.minusDays(4))),
+ client.prepareIndex("index-2", "type", "6").setSource("d", formatter.format(now.minusDays(5))),
+ client.prepareIndex("index-3", "type", "7").setSource("d", formatter.format(now.minusDays(6))),
+ client.prepareIndex("index-3", "type", "8").setSource("d", formatter.format(now.minusDays(7))),
+ client.prepareIndex("index-3", "type", "9").setSource("d", formatter.format(now.minusDays(8))));
ensureSearchable("index-1", "index-2", "index-3");
assertCacheState(client, "index-1", 0, 0);
assertCacheState(client, "index-2", 0, 0);
diff --git a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
index 81f5c7982d4d8..4486d4ff83ffb 100644
--- a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
+++ b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
@@ -29,8 +29,8 @@
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;
@@ -60,15 +60,15 @@ public void testSerialization() throws Exception {
assertEquals(DocValueFormat.Decimal.class, vf.getClass());
assertEquals("###.##", ((DocValueFormat.Decimal) vf).pattern);
- DocValueFormat.DateTime dateFormat =
- new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
+ DateFormatter formatter = DateFormatter.forPattern("epoch_second");
+ DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(formatter, ZoneOffset.ofHours(1));
out = new BytesStreamOutput();
out.writeNamedWriteable(dateFormat);
in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
vf = in.readNamedWriteable(DocValueFormat.class);
assertEquals(DocValueFormat.DateTime.class, vf.getClass());
assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.pattern());
- assertEquals(DateTimeZone.forOffsetHours(1), ((DocValueFormat.DateTime) vf).timeZone);
+ assertEquals(ZoneOffset.ofHours(1), ((DocValueFormat.DateTime) vf).timeZone);
out = new BytesStreamOutput();
out.writeNamedWriteable(DocValueFormat.GEOHASH);
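DocValueFormatTests now serialises the time zone as a ZoneOffset. A short JDK sketch of how offset-style IDs behave under java.time; the class name is illustrative:

    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class OffsetZoneIds {
        public static void main(String[] args) {
            // ZoneId.of accepts fixed-offset IDs like "+01:00" and returns a ZoneOffset,
            // which replaces the Joda DateTimeZone.forOffsetHours/forID calls.
            ZoneId plusOne = ZoneId.of("+01:00");
            System.out.println(plusOne);                                // +01:00
            System.out.println(plusOne.equals(ZoneOffset.ofHours(1)));  // true
        }
    }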
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java
index 3a10edf183376..a54f30ffac0d1 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java
@@ -36,7 +36,7 @@ protected AutoDateHistogramAggregationBuilder createTestAggregatorBuilder() {
builder.missing(randomIntBetween(0, 10));
}
if (randomBoolean()) {
- builder.timeZone(randomDateTimeZone());
+ builder.timeZone(randomZone());
}
return builder;
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
index f65f2bde9662a..c59be546acd1a 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
@@ -22,12 +22,12 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
-import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
@@ -46,13 +46,14 @@
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
import org.junit.After;
import java.io.IOException;
+import java.time.Instant;
import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -83,21 +84,21 @@
@ESIntegTestCase.SuiteScopeTestCase
public class DateHistogramIT extends ESIntegTestCase {
- static Map<DateTime, Map<String, Object>> expectedMultiSortBuckets;
+ static Map<ZonedDateTime, Map<String, Object>> expectedMultiSortBuckets;
- private DateTime date(int month, int day) {
- return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
+ private ZonedDateTime date(int month, int day) {
+ return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC);
}
- private DateTime date(String date) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date);
+ private ZonedDateTime date(String date) {
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date));
}
- private static String format(DateTime date, String pattern) {
- return DateTimeFormat.forPattern(pattern).print(date);
+ private static String format(ZonedDateTime date, String pattern) {
+ return DateFormatter.forPattern(pattern).format(date);
}
- private IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception {
+ private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception {
return client().prepareIndex(idx, "type").setSource(jsonBuilder()
.startObject()
.timeField("date", date)
@@ -142,7 +143,7 @@ public void setupSuiteScopeCluster() throws Exception {
ensureSearchable();
}
- private void addExpectedBucket(DateTime key, long docCount, double avg, double sum) {
+ private void addExpectedBucket(ZonedDateTime key, long docCount, double avg, double sum) {
Map<String, Object> bucketProps = new HashMap<>();
bucketProps.put("_count", docCount);
bucketProps.put("avg_l", avg);
@@ -196,13 +197,12 @@ public void afterEachTest() throws IOException {
internalCluster().wipeIndices("idx2");
}
- private static String getBucketKeyAsString(DateTime key) {
- return getBucketKeyAsString(key, DateTimeZone.UTC);
+ private static String getBucketKeyAsString(ZonedDateTime key) {
+ return getBucketKeyAsString(key, ZoneOffset.UTC);
}
- private static String getBucketKeyAsString(DateTime key, DateTimeZone tz) {
- ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(tz);
- return DateFormatter.forPattern(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern()).withZone(zoneId).formatJoda(key);
+ private static String getBucketKeyAsString(ZonedDateTime key, ZoneId tz) {
+ return DateFormatter.forPattern("strict_date_optional_time").withZone(tz).format(key);
}
public void testSingleValuedField() throws Exception {
@@ -218,35 +218,34 @@ public void testSingleValuedField() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testSingleValuedFieldWithTimeZone() throws Exception {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(dateHistogram("histo").field("date")
- .dateHistogramInterval(DateHistogramInterval.DAY)
- .minDocCount(1)
- .timeZone(DateTimeZone.forID("+01:00"))).get();
- DateTimeZone tz = DateTimeZone.forID("+01:00");
+ .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1)
+ .timeZone(ZoneId.of("+01:00"))).execute()
+ .actionGet();
+ ZoneId tz = ZoneId.of("+01:00");
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@@ -255,46 +254,46 @@ public void testSingleValuedFieldWithTimeZone() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(6));
- DateTime key = new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 14, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 14, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(4);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(5);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
}
@@ -304,7 +303,7 @@ public void testSingleValued_timeZone_epoch() throws Exception {
if (randomBoolean()) {
format = format + "||date_optional_time";
}
- DateTimeZone tz = DateTimeZone.forID("+01:00");
+ ZoneId tz = ZoneId.of("+01:00");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").field("date")
.dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1)
@@ -318,21 +317,25 @@ public void testSingleValued_timeZone_epoch() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(6));
- List<DateTime> expectedKeys = new ArrayList<>();
- expectedKeys.add(new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC));
- expectedKeys.add(new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC));
-
+ List<ZonedDateTime> expectedKeys = new ArrayList<>();
+ expectedKeys.add(ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 2, 1, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 2, 14, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 3, 1, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 3, 14, 23, 0, 0, 0, ZoneOffset.UTC));
+ expectedKeys.add(ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC));
- Iterator<DateTime> keyIterator = expectedKeys.iterator();
+ Iterator<ZonedDateTime> keyIterator = expectedKeys.iterator();
for (Histogram.Bucket bucket : buckets) {
assertThat(bucket, notNullValue());
- DateTime expectedKey = keyIterator.next();
- assertThat(bucket.getKeyAsString(), equalTo(Long.toString(expectedKey.getMillis() / millisDivider)));
- assertThat(((DateTime) bucket.getKey()), equalTo(expectedKey));
+ ZonedDateTime expectedKey = keyIterator.next();
+ String bucketKey = bucket.getKeyAsString();
+ String expectedBucketName = Long.toString(expectedKey.toInstant().toEpochMilli() / millisDivider);
+ if (JavaVersion.current().getVersion().get(0) == 8 && bucket.getKeyAsString().endsWith(".0")) {
+ expectedBucketName = expectedBucketName + ".0";
+ }
+ assertThat(bucketKey, equalTo(expectedBucketName));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(expectedKey));
assertThat(bucket.getDocCount(), equalTo(1L));
}
}
@@ -355,7 +358,7 @@ public void testSingleValuedFieldOrderedByKeyAsc() throws Exception {
int i = 0;
for (Histogram.Bucket bucket : buckets) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@@ -377,7 +380,7 @@ public void testSingleValuedFieldOrderedByKeyDesc() throws Exception {
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@@ -399,7 +402,7 @@ public void testSingleValuedFieldOrderedByCountAsc() throws Exception {
int i = 0;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@@ -421,7 +424,7 @@ public void testSingleValuedFieldOrderedByCountDesc() throws Exception {
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@@ -444,42 +447,42 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)histo).getProperty("_count");
Object[] propertiesCounts = (Object[]) ((InternalAggregation)histo).getProperty("sum.value");
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(1.0));
- assertThat((DateTime) propertiesKeys[0], equalTo(key));
+ assertThat((ZonedDateTime) propertiesKeys[0], equalTo(key));
assertThat((long) propertiesDocCounts[0], equalTo(1L));
assertThat((double) propertiesCounts[0], equalTo(1.0));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(5.0));
- assertThat((DateTime) propertiesKeys[1], equalTo(key));
+ assertThat((ZonedDateTime) propertiesKeys[1], equalTo(key));
assertThat((long) propertiesDocCounts[1], equalTo(2L));
assertThat((double) propertiesCounts[1], equalTo(5.0));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(15.0));
- assertThat((DateTime) propertiesKeys[2], equalTo(key));
+ assertThat((ZonedDateTime) propertiesKeys[2], equalTo(key));
assertThat((long) propertiesDocCounts[2], equalTo(3L));
assertThat((double) propertiesCounts[2], equalTo(15.0));
}
@@ -502,7 +505,7 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception {
int i = 0;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@@ -525,7 +528,7 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@@ -548,7 +551,7 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
- assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@@ -625,25 +628,25 @@ public void testSingleValuedFieldWithValueScript() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -669,32 +672,32 @@ public void testMultiValuedField() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
- key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -763,32 +766,32 @@ public void testMultiValuedFieldWithValueScript() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
- DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
- key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
- key = new DateTime(2012, 5, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 5, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -817,25 +820,25 @@ public void testScriptSingleValue() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -856,32 +859,32 @@ public void testScriptMultiValued() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
- key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -922,25 +925,25 @@ public void testPartiallyUnmapped() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -971,7 +974,7 @@ public void testEmptyAggregation() throws Exception {
public void testSingleValueWithTimeZone() throws Exception {
prepareCreate("idx2").addMapping("type", "date", "type=date").get();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
- DateTime date = date("2014-03-11T00:00:00+00:00");
+ ZonedDateTime date = date("2014-03-11T00:00:00+00:00");
for (int i = 0; i < reqs.length; i++) {
reqs[i] = client().prepareIndex("idx2", "type", "" + i)
.setSource(jsonBuilder().startObject().timeField("date", date).endObject());
@@ -983,9 +986,9 @@ public void testSingleValueWithTimeZone() throws Exception {
.setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo")
.field("date")
- .timeZone(DateTimeZone.forID("-02:00"))
+ .timeZone(ZoneId.of("-02:00"))
.dateHistogramInterval(DateHistogramInterval.DAY)
- .format("yyyy-MM-dd:HH-mm-ssZZ"))
+ .format("yyyy-MM-dd:HH-mm-ssZZZZZ"))
.get();
assertThat(response.getHits().getTotalHits().value, equalTo(5L));
@@ -1010,8 +1013,9 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
// we're testing on days, so the base must be rounded to a day
int interval = randomIntBetween(1, 2); // in days
long intervalMillis = interval * 24 * 60 * 60 * 1000;
- DateTime base = new DateTime(DateTimeZone.UTC).dayOfMonth().roundFloorCopy();
- DateTime baseKey = new DateTime(intervalMillis * (base.getMillis() / intervalMillis), DateTimeZone.UTC);
+ ZonedDateTime base = ZonedDateTime.now(ZoneOffset.UTC).withDayOfMonth(1);
+ ZonedDateTime baseKey = Instant.ofEpochMilli(intervalMillis * (base.toInstant().toEpochMilli() / intervalMillis))
+ .atZone(ZoneOffset.UTC);
prepareCreate("idx2")
.setSettings(
@@ -1028,7 +1032,7 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
} else {
int docCount = randomIntBetween(1, 3);
for (int j = 0; j < docCount; j++) {
- DateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1));
+ ZonedDateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1));
builders.add(indexDoc("idx2", date, j));
}
docCounts[i] = docCount;
@@ -1037,19 +1041,19 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
indexRandom(true, builders);
ensureSearchable("idx2");
- DateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval);
+ ZonedDateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval);
// randomizing the number of buckets on the min bound
// (can sometimes fall within the data range, but more frequently will fall before the data range)
int addedBucketsLeft = randomIntBetween(0, numOfBuckets);
- DateTime boundsMinKey;
+ ZonedDateTime boundsMinKey;
if (frequently()) {
boundsMinKey = baseKey.minusDays(addedBucketsLeft * interval);
} else {
boundsMinKey = baseKey.plusDays(addedBucketsLeft * interval);
addedBucketsLeft = 0;
}
- DateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1));
+ ZonedDateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1));
// randomizing the number of buckets on the max bound
// (can sometimes fall within the data range, but more frequently will fall after the data range)
@@ -1059,8 +1063,8 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
addedBucketsRight = 0;
boundsMaxKeyDelta = -boundsMaxKeyDelta;
}
- DateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta);
- DateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1));
+ ZonedDateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta);
+ ZonedDateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1));
// it could be that the random bounds.min we chose ended up greater than
// bounds.max - this should
@@ -1105,11 +1109,11 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(bucketsCount));
- DateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey;
+ ZonedDateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey;
for (int i = 0; i < bucketsCount; i++) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getKeyAsString(), equalTo(format(key, pattern)));
assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i]));
key = key.plusDays(interval);
@@ -1126,15 +1130,15 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
.setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
.get();
- DateMathParser parser = Joda.getStrictStandardDateFormatter().toDateMathParser();
+ DateMathParser parser = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis").toDateMathParser();
// we pick a random timezone offset of +12/-12 hours and insert two documents
// one at 00:00 in that time zone and one at 12:00
List<IndexRequestBuilder> builders = new ArrayList<>();
int timeZoneHourOffset = randomIntBetween(-12, 12);
- DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset);
- DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
- DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
+ ZoneId timezone = ZoneOffset.ofHours(timeZoneHourOffset);
+ ZonedDateTime timeZoneStartToday = parser.parse("now/d", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC);
+ ZonedDateTime timeZoneNoonToday = parser.parse("now/d+12h", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC);
builders.add(indexDoc(index, timeZoneStartToday, 1));
builders.add(indexDoc(index, timeZoneNoonToday, 2));
indexRandom(true, builders);
@@ -1145,7 +1149,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
response = client()
.prepareSearch(index)
.setQuery(QueryBuilders.rangeQuery("date")
- .from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getID()))
+ .from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId()))
.addAggregation(
dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.hours(1))
.timeZone(timezone).minDocCount(0).extendedBounds(new ExtendedBounds("now/d", "now/d+23h"))
@@ -1164,8 +1168,8 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception {
for (int i = 0; i < buckets.size(); i++) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
- assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(),
- equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC)));
+ ZonedDateTime zonedDateTime = timeZoneStartToday.plus(i * 60 * 60 * 1000, ChronoUnit.MILLIS);
+ assertThat("InternalBucket " + i + " had wrong key", (ZonedDateTime) bucket.getKey(), equalTo(zonedDateTime));
if (i == 0 || i == 12) {
assertThat(bucket.getDocCount(), equalTo(1L));
} else {
@@ -1186,10 +1190,11 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception {
.get();
List<IndexRequestBuilder> builders = new ArrayList<>();
- builders.add(indexDoc(index, DateTime.parse("2016-01-03T08:00:00.000Z"), 1));
- builders.add(indexDoc(index, DateTime.parse("2016-01-03T08:00:00.000Z"), 2));
- builders.add(indexDoc(index, DateTime.parse("2016-01-06T08:00:00.000Z"), 3));
- builders.add(indexDoc(index, DateTime.parse("2016-01-06T08:00:00.000Z"), 4));
+ DateFormatter formatter = DateFormatter.forPattern("date_optional_time");
+ builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-03T08:00:00.000Z")), 1));
+ builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-03T08:00:00.000Z")), 2));
+ builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-06T08:00:00.000Z")), 3));
+ builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-06T08:00:00.000Z")), 4));
indexRandom(true, builders);
ensureSearchable(index);
@@ -1233,7 +1238,7 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception {
public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception {
String mappingJson = Strings.toString(jsonBuilder().startObject()
.startObject("type").startObject("properties")
- .startObject("date").field("type", "date").field("format", "dateOptionalTime||dd-MM-yyyy")
+ .startObject("date").field("type", "date").field("format", "strict_date_optional_time||dd-MM-yyyy")
.endObject().endObject().endObject().endObject());
prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).get();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
@@ -1256,23 +1261,23 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(1));
- DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2014, 3, 10, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
}
public void testIssue6965() {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("+01:00"))
- .dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0))
+ .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("+01:00"))
+ .dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0))
.get();
assertSearchResponse(response);
- DateTimeZone tz = DateTimeZone.forID("+01:00");
+ ZoneId tz = ZoneId.of("+01:00");
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
@@ -1280,25 +1285,25 @@ public void testIssue6965() {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2011, 12, 31, 23, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2011, 12, 31, 23, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
- key = new DateTime(2012, 1, 31, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 1, 31, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
- key = new DateTime(2012, 2, 29, 23, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 29, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -1309,7 +1314,7 @@ public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionExc
ensureSearchable("test9491");
SearchResponse response = client().prepareSearch("test9491")
.addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.YEAR)
- .timeZone(DateTimeZone.forID("Asia/Jerusalem")))
+ .timeZone(ZoneId.of("Asia/Jerusalem")).format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX"))
.get();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@@ -1327,8 +1332,8 @@ public void testIssue8209() throws InterruptedException, ExecutionException {
ensureSearchable("test8209");
SearchResponse response = client().prepareSearch("test8209")
.addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.MONTH)
- .timeZone(DateTimeZone.forID("CET"))
- .minDocCount(0))
+ .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
+ .timeZone(ZoneId.of("CET")).minDocCount(0))
.get();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@@ -1371,7 +1376,7 @@ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionExce
SearchResponse response = client().prepareSearch(indexDateUnmapped)
.addAggregation(
- dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("YYYY-MM")
+ dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("yyyy-MM")
.minDocCount(0).extendedBounds(new ExtendedBounds("2018-01", "2018-01")))
.get();
assertSearchResponse(response);
@@ -1393,15 +1398,19 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException,
indexRandom(true, client().prepareIndex(index, "type").setSource("d", "1477954800000"));
ensureSearchable(index);
SearchResponse response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d")
- .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin"))).get();
+ .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))).get();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(1));
- assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000"));
+ if (JavaVersion.current().getVersion().get(0) == 8 && histo.getBuckets().get(0).getKeyAsString().endsWith(".0")) {
+ assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000.0"));
+ } else {
+ assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000"));
+ }
assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L));
response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d")
- .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin")).format("yyyy-MM-dd"))
+ .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin")).format("yyyy-MM-dd"))
.get();
assertSearchResponse(response);
histo = response.getAggregations().get("histo");
@@ -1422,7 +1431,7 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException,
public void testDSTEndTransition() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setQuery(new MatchNoneQueryBuilder())
- .addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("Europe/Oslo"))
+ .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo"))
.dateHistogramInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds(
new ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")))
.get();
@@ -1430,9 +1439,12 @@ public void testDSTEndTransition() throws Exception {
Histogram histo = response.getAggregations().get("histo");
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
- assertThat(((DateTime) buckets.get(1).getKey()).getMillis() - ((DateTime) buckets.get(0).getKey()).getMillis(), equalTo(3600000L));
- assertThat(((DateTime) buckets.get(2).getKey()).getMillis() - ((DateTime) buckets.get(1).getKey()).getMillis(), equalTo(3600000L));
- assertThat(((DateTime) buckets.get(3).getKey()).getMillis() - ((DateTime) buckets.get(2).getKey()).getMillis(), equalTo(3600000L));
+ assertThat(((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() -
+ ((ZonedDateTime) buckets.get(0).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
+ assertThat(((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() -
+ ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
+ assertThat(((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() -
+ ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
}
/**
@@ -1443,8 +1455,10 @@ public void testDontCacheScripts() throws Exception {
assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=date")
.setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
.get());
- indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date(1, 1)),
- client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date(2, 1)));
+ String date = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(1, 1));
+ String date2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(2, 1));
+ indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date),
+ client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date2));
// Make sure we are starting with a clear cache
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
@@ -1514,7 +1528,7 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound
}
private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
- DateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(DateTime[]::new);
+ ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new);
SearchResponse response = client()
.prepareSearch("sort_idx")
.setTypes("type")
@@ -1544,7 +1558,7 @@ private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
}
}
- private DateTime key(Histogram.Bucket bucket) {
- return (DateTime) bucket.getKey();
+ private ZonedDateTime key(Histogram.Bucket bucket) {
+ return (ZonedDateTime) bucket.getKey();
}
}
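Aside (not part of the patch): the conversions in this file replace Joda DateTime bucket keys with java.time ZonedDateTime values and floor the extended-bounds base key via plain epoch-millis arithmetic. A minimal, self-contained java.time sketch of that pattern follows; the class and variable names are illustrative only.

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class BucketKeyRoundingSketch {
    public static void main(String[] args) {
        // A month-bucket key expressed directly in java.time, as the converted asserts do.
        ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
        System.out.println(key); // 2012-02-01T00:00Z

        // Flooring an instant to a whole interval, mirroring the
        // intervalMillis * (epochMilli / intervalMillis) arithmetic in the extended-bounds test.
        long intervalMillis = 24L * 60 * 60 * 1000; // one day
        long nowMillis = ZonedDateTime.now(ZoneOffset.UTC).toInstant().toEpochMilli();
        ZonedDateTime baseKey = Instant.ofEpochMilli(intervalMillis * (nowMillis / intervalMillis))
            .atZone(ZoneOffset.UTC);
        System.out.println(baseKey); // midnight UTC of the current day for a one-day interval
    }
}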
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
index 44a9f8c2cb126..080c4faffd696 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
@@ -20,18 +20,19 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.List;
-import java.util.concurrent.ExecutionException;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@@ -50,9 +51,10 @@
public class DateHistogramOffsetIT extends ESIntegTestCase {
private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss";
+ private static final DateFormatter FORMATTER = DateFormatter.forPattern(DATE_FORMAT);
- private DateTime date(String date) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date);
+ private ZonedDateTime date(String date) {
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date));
}
@Before
@@ -65,8 +67,9 @@ public void afterEachTest() throws IOException {
internalCluster().wipeIndices("idx2");
}
- private void prepareIndex(DateTime date, int numHours, int stepSizeHours, int idxIdStart)
- throws IOException, InterruptedException, ExecutionException {
+ private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, int idxIdStart)
+ throws IOException, InterruptedException {
+
IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours];
for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) {
reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i)
@@ -94,8 +97,8 @@ public void testSingleValueWithPositiveOffset() throws Exception {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
- checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 2, 0, DateTimeZone.UTC), 2L);
- checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 2, 0, DateTimeZone.UTC), 3L);
+ checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 2, 0, 0, 0, ZoneOffset.UTC), 2L);
+ checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 2, 0, 0, 0, ZoneOffset.UTC), 3L);
}
public void testSingleValueWithNegativeOffset() throws Exception {
@@ -116,8 +119,8 @@ public void testSingleValueWithNegativeOffset() throws Exception {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
- checkBucketFor(buckets.get(0), new DateTime(2014, 3, 9, 22, 0, DateTimeZone.UTC), 2L);
- checkBucketFor(buckets.get(1), new DateTime(2014, 3, 10, 22, 0, DateTimeZone.UTC), 3L);
+ checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 9, 22, 0, 0, 0, ZoneOffset.UTC), 2L);
+ checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 10, 22, 0, 0, 0, ZoneOffset.UTC), 3L);
}
/**
@@ -143,11 +146,11 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(5));
- checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 6, 0, DateTimeZone.UTC), 6L);
- checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 6, 0, DateTimeZone.UTC), 6L);
- checkBucketFor(buckets.get(2), new DateTime(2014, 3, 12, 6, 0, DateTimeZone.UTC), 0L);
- checkBucketFor(buckets.get(3), new DateTime(2014, 3, 13, 6, 0, DateTimeZone.UTC), 6L);
- checkBucketFor(buckets.get(4), new DateTime(2014, 3, 14, 6, 0, DateTimeZone.UTC), 6L);
+ checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
+ checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
+ checkBucketFor(buckets.get(2), ZonedDateTime.of(2014, 3, 12, 6, 0, 0, 0, ZoneOffset.UTC), 0L);
+ checkBucketFor(buckets.get(3), ZonedDateTime.of(2014, 3, 13, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
+ checkBucketFor(buckets.get(4), ZonedDateTime.of(2014, 3, 14, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
}
/**
@@ -155,10 +158,10 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception {
* @param key the expected key
* @param expectedSize the expected size of the bucket
*/
- private static void checkBucketFor(Histogram.Bucket bucket, DateTime key, long expectedSize) {
+ private static void checkBucketFor(Histogram.Bucket bucket, ZonedDateTime key, long expectedSize) {
assertThat(bucket, notNullValue());
- assertThat(bucket.getKeyAsString(), equalTo(key.toString(DATE_FORMAT)));
- assertThat(((DateTime) bucket.getKey()), equalTo(key));
+ assertThat(bucket.getKeyAsString(), equalTo(FORMATTER.format(key)));
+ assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(expectedSize));
}
}
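Aside (not part of the patch): checkBucketFor now asserts against FORMATTER.format(key) instead of Joda's key.toString(DATE_FORMAT). A plain java.time sketch of the same formatting, with java.time.format.DateTimeFormatter standing in for Elasticsearch's DateFormatter; names here are illustrative.

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class KeyFormattingSketch {
    public static void main(String[] args) {
        // Same pattern as DATE_FORMAT in the offset test ("hh" is the 1-12 clock hour).
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd:hh-mm-ss");
        ZonedDateTime key = ZonedDateTime.of(2014, 3, 10, 2, 0, 0, 0, ZoneOffset.UTC);
        System.out.println(formatter.format(key)); // 2014-03-10:02-00-00
    }
}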
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
index 91834de935b4f..f50c0bfd072b1 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
@@ -18,9 +18,11 @@
*/
package org.elasticsearch.search.aggregations.bucket;
+import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
@@ -33,9 +35,10 @@
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -70,12 +73,12 @@ private static IndexRequestBuilder indexDoc(int month, int day, int value) throw
.endObject());
}
- private static DateTime date(int month, int day) {
- return date(month, day, DateTimeZone.UTC);
+ private static ZonedDateTime date(int month, int day) {
+ return date(month, day, ZoneOffset.UTC);
}
- private static DateTime date(int month, int day, DateTimeZone timezone) {
- return new DateTime(2012, month, day, 0, 0, timezone);
+ private static ZonedDateTime date(int month, int day, ZoneId timezone) {
+ return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, timezone);
}
private static int numDocs;
@@ -128,7 +131,7 @@ public void testDateMath() throws Exception {
.prepareSearch("idx")
.addAggregation(
rangeBuilder.addUnboundedTo("a long time ago", "now-50y").addRange("recently", "now-50y", "now-1y")
- .addUnboundedFrom("last year", "now-1y").timeZone(DateTimeZone.forID("EST"))).get();
+ .addUnboundedFrom("last year", "now-1y").timeZone(ZoneId.of("Etc/GMT+5"))).get();
assertSearchResponse(response);
@@ -176,8 +179,8 @@ public void testSingleValueField() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -185,8 +188,8 @@ public void testSingleValueField() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -194,8 +197,8 @@ public void testSingleValueField() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -222,8 +225,8 @@ public void testSingleValueFieldWithStringDates() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -231,8 +234,8 @@ public void testSingleValueFieldWithStringDates() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -240,8 +243,8 @@ public void testSingleValueFieldWithStringDates() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -269,8 +272,8 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -278,8 +281,8 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15-2012-03-15"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -287,19 +290,17 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
}
public void testSingleValueFieldWithDateMath() throws Exception {
- DateTimeZone timezone = randomDateTimeZone();
- int timeZoneOffset = timezone.getOffset(date(2, 15));
- // if time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format
- String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).toString("ZZ");
- String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3,15, timezone).toString("ZZ");
+ ZoneId timezone = randomZone();
+ int timeZoneOffset = timezone.getRules().getOffset(date(2, 15).toInstant()).getTotalSeconds();
+ String suffix = timezone.equals(ZoneOffset.UTC) ? "Z" : timezone.getId();
long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L;
SearchResponse response = client().prepareSearch("idx")
@@ -321,29 +322,29 @@ public void testSingleValueFieldWithDateMath() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + feb15Suffix));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC)));
+ assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + suffix));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
assertThat(bucket.getFromAsString(), nullValue());
- assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix));
+ assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + suffix));
assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix +
- "-2012-03-15T00:00:00.000" + mar15Suffix));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC)));
- assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix));
- assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix));
+ assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + suffix +
+ "-2012-03-15T00:00:00.000" + suffix));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
+ assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + suffix));
+ assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + suffix));
assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
- assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix + "-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
- assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix));
+ assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + suffix + "-*"));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
+ assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + suffix));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L - expectedFirstBucketCount));
}
@@ -369,8 +370,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r1"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -378,8 +379,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r2"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -387,8 +388,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r3"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -429,8 +430,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r1"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -444,8 +445,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r2"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -459,8 +460,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r3"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -502,8 +503,8 @@ public void testMultiValuedField() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -511,8 +512,8 @@ public void testMultiValuedField() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(3L));
@@ -520,8 +521,8 @@ public void testMultiValuedField() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L));
@@ -557,8 +558,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(1L));
@@ -566,8 +567,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -575,8 +576,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 1L));
@@ -616,8 +617,8 @@ public void testScriptSingleValue() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -625,8 +626,8 @@ public void testScriptSingleValue() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -634,8 +635,8 @@ public void testScriptSingleValue() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -675,8 +676,8 @@ public void testScriptMultiValued() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -684,8 +685,8 @@ public void testScriptMultiValued() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(3L));
@@ -693,8 +694,8 @@ public void testScriptMultiValued() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L));
@@ -723,8 +724,8 @@ public void testUnmapped() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -732,8 +733,8 @@ public void testUnmapped() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -741,8 +742,8 @@ public void testUnmapped() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -769,8 +770,8 @@ public void testUnmappedWithStringDates() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -778,8 +779,8 @@ public void testUnmappedWithStringDates() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -787,8 +788,8 @@ public void testUnmappedWithStringDates() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0L));
@@ -815,8 +816,8 @@ public void testPartiallyUnmapped() throws Exception {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), nullValue());
- assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -824,8 +825,8 @@ public void testPartiallyUnmapped() throws Exception {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
- assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@@ -833,8 +834,8 @@ public void testPartiallyUnmapped() throws Exception {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
- assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
- assertThat(((DateTime) bucket.getTo()), nullValue());
+ assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
+ assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@@ -859,8 +860,8 @@ public void testEmptyAggregation() throws Exception {
assertThat(dateRange.getName(), equalTo("date_range"));
assertThat(buckets.size(), is(1));
assertThat((String) buckets.get(0).getKey(), equalTo("0-1"));
- assertThat(((DateTime) buckets.get(0).getFrom()).getMillis(), equalTo(0L));
- assertThat(((DateTime) buckets.get(0).getTo()).getMillis(), equalTo(1L));
+ assertThat(((ZonedDateTime) buckets.get(0).getFrom()).toInstant().toEpochMilli(), equalTo(0L));
+ assertThat(((ZonedDateTime) buckets.get(0).getTo()).toInstant().toEpochMilli(), equalTo(1L));
assertThat(buckets.get(0).getDocCount(), equalTo(0L));
assertThat(buckets.get(0).getAggregations().asList().isEmpty(), is(true));
}
@@ -903,7 +904,8 @@ public void testDontCacheScripts() throws Exception {
params.put("fieldname", "date");
SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params))
- .addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC)))
+ .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)))
.get();
assertSearchResponse(r);
@@ -915,7 +917,8 @@ public void testDontCacheScripts() throws Exception {
// To make sure that the cache is working test that a request not using
// a script is cached
r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date")
- .addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC)))
+ .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)))
.get();
assertSearchResponse(r);
@@ -969,10 +972,9 @@ public void testRangeWithFormatStringValue() throws Exception {
assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L);
// providing numeric input without format should throw an exception
- Exception e = expectThrows(Exception.class, () -> client().prepareSearch(indexName).setSize(0)
+ ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000)).get());
- Throwable cause = e.getCause();
- assertThat(cause.getMessage(),
+ assertThat(e.getDetailedMessage(),
containsString("failed to parse date field [1000000] with format [strict_hour_minute_second]"));
}
@@ -984,9 +986,9 @@ public void testRangeWithFormatNumericValue() throws Exception {
String indexName = "dateformat_numeric_test_idx";
assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=epoch_second"));
indexRandom(true,
- client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1000).endObject()),
+ client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()),
client().prepareIndex(indexName, "type", "2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()),
- client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3000).endObject()));
+ client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3008).endObject()));
// using no format should work when to/from is compatible with format in
// mapping
@@ -994,39 +996,39 @@ public void testRangeWithFormatNumericValue() throws Exception {
.addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
List buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
- assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
- assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
+ if (JavaVersion.current().getVersion().get(0) == 8) {
+ assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L);
+ assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L);
+ } else {
+ assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
+ assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
+ }
// using no format should also work when and to/from are string values
searchResponse = client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
- assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
- assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
+ if (JavaVersion.current().getVersion().get(0) == 8) {
+ assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L);
+ assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L);
+ } else {
+ assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
+ assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
+ }
// also e-notation should work, fractional parts should be truncated
searchResponse = client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
- assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
- assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
-
- // also e-notation and floats provided as string also be truncated (see: #14641)
- searchResponse = client().prepareSearch(indexName).setSize(0)
- .addAggregation(dateRange("date_range").field("date").addRange("1.0e3", "3.0e3").addRange("3.0e3", "4.0e3")).get();
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
- buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
- assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
- assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
-
- searchResponse = client().prepareSearch(indexName).setSize(0)
- .addAggregation(dateRange("date_range").field("date").addRange("1000.123", "3000.8").addRange("3000.8", "4000.3")).get();
- assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
- buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
- assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
- assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
+ if (JavaVersion.current().getVersion().get(0) == 8) {
+ assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L);
+ assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L);
+ } else {
+ assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
+ assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
+ }
// using different format should work when to/from is compatible with
// format in aggregation
@@ -1061,8 +1063,8 @@ private static List checkBuckets(Range dateRange, String expectedA
private static void assertBucket(Bucket bucket, long bucketSize, String expectedKey, long expectedFrom, long expectedTo) {
assertThat(bucket.getDocCount(), equalTo(bucketSize));
assertThat((String) bucket.getKey(), equalTo(expectedKey));
- assertThat(((DateTime) bucket.getFrom()).getMillis(), equalTo(expectedFrom));
- assertThat(((DateTime) bucket.getTo()).getMillis(), equalTo(expectedTo));
+ assertThat(((ZonedDateTime) bucket.getFrom()).toInstant().toEpochMilli(), equalTo(expectedFrom));
+ assertThat(((ZonedDateTime) bucket.getTo()).toInstant().toEpochMilli(), equalTo(expectedTo));
assertThat(bucket.getAggregations().asList().isEmpty(), is(true));
}
}
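(A minimal sketch, not part of the patch, of the Joda-to-java.time equivalence the DateRangeIT assertions above rely on; the sample instant is arbitrary.)

import java.time.ZoneOffset;
import java.time.ZonedDateTime;

// Joda (before):     ((DateTime) bucket.getFrom()).getMillis()
// java.time (after): ((ZonedDateTime) bucket.getFrom()).toInstant().toEpochMilli()
ZonedDateTime from = ZonedDateTime.of(2012, 2, 15, 0, 0, 0, 0, ZoneOffset.UTC);
long fromMillis = from.toInstant().toEpochMilli(); // 1329264000000L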
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
index 1c198fd3ca5d6..34164bc28967c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java
@@ -65,7 +65,7 @@ protected DateRangeAggregationBuilder createTestAggregatorBuilder() {
factory.missing(randomIntBetween(0, 10));
}
if (randomBoolean()) {
- factory.timeZone(randomDateTimeZone());
+ factory.timeZone(randomZone());
}
return factory;
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
index e7bf0fe4cf700..b09277aca6c6d 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
@@ -22,11 +22,10 @@
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.common.time.DateFormatters;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
@@ -124,7 +123,7 @@ public void setupSuiteScopeCluster() throws Exception {
double doubleTerm = longTerm * Math.PI;
ZonedDateTime time = ZonedDateTime.of(2014, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC);
- String dateTerm = DateFormatters.forPattern("yyyy-MM-dd").format(time);
+ String dateTerm = DateFormatter.forPattern("yyyy-MM-dd").format(time);
final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20);
for (int j = 0; j < frequency; ++j) {
indexRequests.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
index ac985660399d7..d31f7a89b462e 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java
@@ -42,7 +42,7 @@ private DateHistogramValuesSourceBuilder randomDateHistogramSourceBuilder() {
histo.interval(randomNonNegativeLong());
}
if (randomBoolean()) {
- histo.timeZone(randomDateTimeZone());
+ histo.timeZone(randomZone());
}
if (randomBoolean()) {
histo.missingBucket(true);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
index e945eeba519f4..5f219ee6be948 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
@@ -39,6 +39,7 @@
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.IpFieldMapper;
@@ -57,12 +58,12 @@
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.sort.SortOrder;
-import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.net.InetAddress;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -1155,8 +1156,7 @@ public void testThatDateHistogramFailsFormatAfter() throws IOException {
},
(result) -> {}
));
- assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class));
- assertThat(exc.getCause().getMessage(), containsString("Parse failure"));
+ assertThat(exc.getMessage(), containsString("failed to parse date field [1474329600000]"));
}
public void testWithDateHistogramAndTimeZone() throws IOException {
@@ -1176,7 +1176,7 @@ public void testWithDateHistogramAndTimeZone() throws IOException {
DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
.field("date")
.dateHistogramInterval(DateHistogramInterval.days(1))
- .timeZone(DateTimeZone.forOffsetHours(1));
+ .timeZone(ZoneOffset.ofHours(1));
return new CompositeAggregationBuilder("name", Collections.singletonList(histo));
},
(result) -> {
@@ -1196,7 +1196,7 @@ public void testWithDateHistogramAndTimeZone() throws IOException {
DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
.field("date")
.dateHistogramInterval(DateHistogramInterval.days(1))
- .timeZone(DateTimeZone.forOffsetHours(1));
+ .timeZone(ZoneOffset.ofHours(1));
return new CompositeAggregationBuilder("name", Collections.singletonList(histo))
.aggregateAfter(createAfterKey("date", 1474326000000L));
@@ -1835,6 +1835,6 @@ private static Map<String, List<Object>> createDocument(Object... fields) {
}
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
index be9a760150427..35bf575d046be 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java
@@ -30,10 +30,10 @@
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
-import org.joda.time.DateTimeZone;
import org.junit.After;
import java.io.IOException;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@@ -64,7 +64,7 @@ private static DocValueFormat randomDocValueFormat(boolean isLong) {
if (isLong) {
// we use specific format only for date histogram on a long/date field
if (randomBoolean()) {
- return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
+ return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), ZoneOffset.ofHours(1));
} else {
return DocValueFormat.RAW;
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
index 6b4d1482adb5e..9293b33e22f43 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
@@ -33,6 +33,7 @@
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@@ -41,11 +42,13 @@
import org.elasticsearch.search.aggregations.metrics.InternalStats;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import org.junit.Assert;
import java.io.IOException;
+import java.time.LocalDate;
+import java.time.YearMonth;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -59,17 +62,17 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
private static final String DATE_FIELD = "date";
private static final String INSTANT_FIELD = "instant";
- private static final List<DateTime> DATES_WITH_TIME = Arrays.asList(
- new DateTime(2010, 3, 12, 1, 7, 45, DateTimeZone.UTC),
- new DateTime(2010, 4, 27, 3, 43, 34, DateTimeZone.UTC),
- new DateTime(2012, 5, 18, 4, 11, 0, DateTimeZone.UTC),
- new DateTime(2013, 5, 29, 5, 11, 31, DateTimeZone.UTC),
- new DateTime(2013, 10, 31, 8, 24, 5, DateTimeZone.UTC),
- new DateTime(2015, 2, 13, 13, 9, 32, DateTimeZone.UTC),
- new DateTime(2015, 6, 24, 13, 47, 43, DateTimeZone.UTC),
- new DateTime(2015, 11, 13, 16, 14, 34, DateTimeZone.UTC),
- new DateTime(2016, 3, 4, 17, 9, 50, DateTimeZone.UTC),
- new DateTime(2017, 12, 12, 22, 55, 46, DateTimeZone.UTC));
+ private static final List<ZonedDateTime> DATES_WITH_TIME = Arrays.asList(
+ ZonedDateTime.of(2010, 3, 12, 1, 7, 45, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2010, 4, 27, 3, 43, 34, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2012, 5, 18, 4, 11, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2013, 5, 29, 5, 11, 31, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2013, 10, 31, 8, 24, 5, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2015, 2, 13, 13, 9, 32, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2015, 6, 24, 13, 47, 43, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2016, 3, 4, 17, 9, 50, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 12, 12, 22, 55, 46, 0, ZoneOffset.UTC));
private static final Query DEFAULT_QUERY = new MatchAllDocsQuery();
@@ -184,7 +187,7 @@ public void testSubAggregations() throws IOException {
}
public void testNoDocs() throws IOException {
- final List<DateTime> dates = Collections.emptyList();
+ final List<ZonedDateTime> dates = Collections.emptyList();
final Consumer aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD);
testSearchCase(DEFAULT_QUERY, dates, aggregation,
@@ -209,8 +212,10 @@ public void testAggregateWrongField() throws IOException {
}
public void testIntervalYear() throws IOException {
- final long start = new DateTime(DateTimeZone.UTC).withDate(2015, 1, 1).getMillis();
- final long end = new DateTime(DateTimeZone.UTC).withDate(2017, 12, 31).getMillis();
+
+
+ final long start = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
+ final long end = LocalDate.of(2017, 12, 31).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
final Query rangeQuery = LongPoint.newRangeQuery(INSTANT_FIELD, start, end);
testSearchCase(rangeQuery, DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
@@ -228,8 +233,8 @@ public void testIntervalYear() throws IOException {
testSearchAndReduceCase(rangeQuery, DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
histogram -> {
- final DateTime startDate = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC);
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
+ final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(startDate, 3);
expectedDocCount.put(startDate.plusYears(1), 1);
expectedDocCount.put(startDate.plusYears(2), 1);
@@ -243,13 +248,13 @@ public void testIntervalYear() throws IOException {
}
public void testIntervalMonth() throws IOException {
- final List<DateTime> datesForMonthInterval = Arrays.asList(
- new DateTime(2017, 1, 1, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 3, 4, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 3, 5, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 3, 6, 0, 0, 0, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForMonthInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 3, 4, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 3, 5, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 3, 6, 0, 0, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForMonthInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> {
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -263,7 +268,7 @@ public void testIntervalMonth() throws IOException {
testSearchAndReduceCase(DEFAULT_QUERY, datesForMonthInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
histogram -> {
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForMonthInterval.get(0).withDayOfMonth(1), 1);
expectedDocCount.put(datesForMonthInterval.get(1).withDayOfMonth(1), 2);
expectedDocCount.put(datesForMonthInterval.get(3).withDayOfMonth(1), 3);
@@ -287,15 +292,15 @@ public void testWithLargeNumberOfBuckets() {
}
public void testIntervalDay() throws IOException {
- final List<DateTime> datesForDayInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 5, 0, 0, 0, DateTimeZone.UTC));
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
+ final List<ZonedDateTime> datesForDayInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC));
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForDayInterval.get(0), 1);
expectedDocCount.put(datesForDayInterval.get(1), 2);
expectedDocCount.put(datesForDayInterval.get(3), 3);
@@ -321,16 +326,16 @@ public void testIntervalDay() throws IOException {
}
public void testIntervalDayWithTZ() throws IOException {
- final List<DateTime> datesForDayInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 5, 0, 0, 0, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForDayInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForDayInterval,
- aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> {
+ aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-01-31T23:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T23:00:00.000-01:00", 2);
@@ -343,7 +348,7 @@ public void testIntervalDayWithTZ() throws IOException {
assertTrue(AggregationInspectionHelper.hasValue(histogram));
});
testSearchAndReduceCase(DEFAULT_QUERY, datesForDayInterval,
- aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> {
+ aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-01-31T00:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T00:00:00.000-01:00", 2);
@@ -358,17 +363,17 @@ public void testIntervalDayWithTZ() throws IOException {
}
public void testIntervalHour() throws IOException {
- final List<DateTime> datesForHourInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 9, 2, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 35, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 10, 15, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 13, 6, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 14, 4, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 14, 5, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 15, 59, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 6, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 48, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 59, 0, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForHourInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD),
histogram -> {
@@ -384,13 +389,13 @@ public void testIntervalHour() throws IOException {
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD),
histogram -> {
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
- expectedDocCount.put(datesForHourInterval.get(0).withMinuteOfHour(0), 2);
- expectedDocCount.put(datesForHourInterval.get(2).withMinuteOfHour(0), 1);
- expectedDocCount.put(datesForHourInterval.get(3).withMinuteOfHour(0), 1);
- expectedDocCount.put(datesForHourInterval.get(4).withMinuteOfHour(0), 2);
- expectedDocCount.put(datesForHourInterval.get(6).withMinuteOfHour(0), 1);
- expectedDocCount.put(datesForHourInterval.get(7).withMinuteOfHour(0), 3);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
+ expectedDocCount.put(datesForHourInterval.get(0).withMinute(0), 2);
+ expectedDocCount.put(datesForHourInterval.get(2).withMinute(0), 1);
+ expectedDocCount.put(datesForHourInterval.get(3).withMinute(0), 1);
+ expectedDocCount.put(datesForHourInterval.get(4).withMinute(0), 2);
+ expectedDocCount.put(datesForHourInterval.get(6).withMinute(0), 1);
+ expectedDocCount.put(datesForHourInterval.get(7).withMinute(0), 3);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(8, buckets.size());
buckets.forEach(bucket ->
@@ -400,10 +405,10 @@ public void testIntervalHour() throws IOException {
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD),
histogram -> {
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
- expectedDocCount.put(datesForHourInterval.get(0).withMinuteOfHour(0), 3);
- expectedDocCount.put(datesForHourInterval.get(0).plusHours(3).withMinuteOfHour(0), 3);
- expectedDocCount.put(datesForHourInterval.get(0).plusHours(6).withMinuteOfHour(0), 4);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
+ expectedDocCount.put(datesForHourInterval.get(0).withMinute(0), 3);
+ expectedDocCount.put(datesForHourInterval.get(0).plusHours(3).withMinute(0), 3);
+ expectedDocCount.put(datesForHourInterval.get(0).plusHours(6).withMinute(0), 4);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(expectedDocCount.size(), buckets.size());
buckets.forEach(bucket ->
@@ -413,22 +418,23 @@ public void testIntervalHour() throws IOException {
}
public void testIntervalHourWithTZ() throws IOException {
- final List<DateTime> datesForHourInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 9, 2, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 35, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 10, 15, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 13, 6, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 14, 4, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 14, 5, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 15, 59, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 6, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 48, 0, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 16, 59, 0, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForHourInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForHourInterval,
- aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)),
+ aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
histogram -> {
final List<String> dateStrings = datesForHourInterval.stream()
- .map(dateTime -> dateTime.withZone(DateTimeZone.forOffsetHours(-1)).toString()).collect(Collectors.toList());
+ .map(dateTime -> DateFormatter.forPattern("strict_date_time")
+ .format(dateTime.withZoneSameInstant(ZoneOffset.ofHours(-1)))).collect(Collectors.toList());
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(datesForHourInterval.size(), buckets.size());
for (int i = 0; i < buckets.size(); i++) {
@@ -439,7 +445,7 @@ public void testIntervalHourWithTZ() throws IOException {
}
);
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
- aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)),
+ aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-02-01T08:00:00.000-01:00", 2);
@@ -458,10 +464,10 @@ public void testIntervalHourWithTZ() throws IOException {
public void testRandomSecondIntervals() throws IOException {
final int length = 120;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusSeconds(i);
+ final ZonedDateTime date = startDate.plusSeconds(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -487,10 +493,10 @@ public void testRandomSecondIntervals() throws IOException {
public void testRandomMinuteIntervals() throws IOException {
final int length = 120;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusMinutes(i);
+ final ZonedDateTime date = startDate.plusMinutes(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -516,10 +522,10 @@ public void testRandomMinuteIntervals() throws IOException {
public void testRandomHourIntervals() throws IOException {
final int length = 72;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusHours(i);
+ final ZonedDateTime date = startDate.plusHours(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -544,10 +550,10 @@ public void testRandomHourIntervals() throws IOException {
public void testRandomDayIntervals() throws IOException {
final int length = 140;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusDays(i);
+ final ZonedDateTime date = startDate.plusDays(i);
dataset.add(date);
}
final int randomChoice = randomIntBetween(1, 3);
@@ -583,17 +589,17 @@ public void testRandomDayIntervals() throws IOException {
final int randomIndex = randomInt(2);
final Histogram.Bucket bucket = buckets.get(randomIndex);
assertEquals(startDate.plusMonths(randomIndex), bucket.getKey());
- assertEquals(startDate.plusMonths(randomIndex).dayOfMonth().getMaximumValue(), bucket.getDocCount());
+ assertEquals(YearMonth.from(startDate.plusMonths(randomIndex)).lengthOfMonth(), bucket.getDocCount());
});
}
}
public void testRandomMonthIntervals() throws IOException {
final int length = 60;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusMonths(i);
+ final ZonedDateTime date = startDate.plusMonths(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -617,10 +623,10 @@ public void testRandomMonthIntervals() throws IOException {
public void testRandomYearIntervals() throws IOException {
final int length = 300;
- final List<DateTime> dataset = new ArrayList<>(length);
- final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
+ final List<ZonedDateTime> dataset = new ArrayList<>(length);
+ final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
- final DateTime date = startDate.plusYears(i);
+ final ZonedDateTime date = startDate.plusYears(i);
dataset.add(date);
}
final Map bucketsToExpectedDocCountMap = new HashMap<>();
@@ -646,12 +652,12 @@ public void testRandomYearIntervals() throws IOException {
}
public void testIntervalMinute() throws IOException {
- final List<DateTime> datesForMinuteInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 9, 2, 35, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 2, 59, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 15, 37, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 16, 4, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 9, 16, 42, DateTimeZone.UTC));
+ final List<ZonedDateTime> datesForMinuteInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 9, 2, 35, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 2, 59, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 15, 37, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 16, 4, 0, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 9, 16, 42, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForMinuteInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
@@ -668,10 +674,10 @@ public void testIntervalMinute() throws IOException {
testSearchAndReduceCase(DEFAULT_QUERY, datesForMinuteInterval,
aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD),
histogram -> {
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
- expectedDocCount.put(datesForMinuteInterval.get(0).withSecondOfMinute(0), 2);
- expectedDocCount.put(datesForMinuteInterval.get(2).withSecondOfMinute(0), 1);
- expectedDocCount.put(datesForMinuteInterval.get(3).withSecondOfMinute(0), 2);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
+ expectedDocCount.put(datesForMinuteInterval.get(0).withSecond(0), 2);
+ expectedDocCount.put(datesForMinuteInterval.get(2).withSecond(0), 1);
+ expectedDocCount.put(datesForMinuteInterval.get(3).withSecond(0), 2);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(15, buckets.size());
buckets.forEach(bucket ->
@@ -681,15 +687,15 @@ public void testIntervalMinute() throws IOException {
}
public void testIntervalSecond() throws IOException {
- final List<DateTime> datesForSecondInterval = Arrays.asList(
- new DateTime(2017, 2, 1, 0, 0, 5, 15, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 7, 299, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 7, 74, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 11, 688, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 11, 210, DateTimeZone.UTC),
- new DateTime(2017, 2, 1, 0, 0, 11, 380, DateTimeZone.UTC));
- final DateTime startDate = datesForSecondInterval.get(0).withMillisOfSecond(0);
- final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
+ final List<ZonedDateTime> datesForSecondInterval = Arrays.asList(
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 5, 15, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 299, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 74, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 688, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 210, ZoneOffset.UTC),
+ ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 380, ZoneOffset.UTC));
+ final ZonedDateTime startDate = datesForSecondInterval.get(0).withNano(0);
+ final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(startDate, 1);
expectedDocCount.put(startDate.plusSeconds(2), 2);
expectedDocCount.put(startDate.plusSeconds(6), 3);
@@ -712,19 +718,19 @@ public void testIntervalSecond() throws IOException {
);
}
- private void testSearchCase(final Query query, final List<DateTime> dataset,
+ private void testSearchCase(final Query query, final List<ZonedDateTime> dataset,
final Consumer configure,
final Consumer verify) throws IOException {
executeTestCase(false, query, dataset, configure, verify);
}
- private void testSearchAndReduceCase(final Query query, final List<DateTime> dataset,
+ private void testSearchAndReduceCase(final Query query, final List<ZonedDateTime> dataset,
final Consumer configure,
final Consumer verify) throws IOException {
executeTestCase(true, query, dataset, configure, verify);
}
- private void testBothCases(final Query query, final List<DateTime> dataset,
+ private void testBothCases(final Query query, final List<ZonedDateTime> dataset,
final Consumer configure,
final Consumer verify) throws IOException {
executeTestCase(false, query, dataset, configure, verify);
@@ -745,18 +751,18 @@ protected IndexSettings createIndexSettings() {
);
}
- private void executeTestCase(final boolean reduced, final Query query, final List<DateTime> dataset,
+ private void executeTestCase(final boolean reduced, final Query query, final List<ZonedDateTime> dataset,
final Consumer configure,
final Consumer verify) throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
final Document document = new Document();
- for (final DateTime date : dataset) {
+ for (final ZonedDateTime date : dataset) {
if (frequently()) {
indexWriter.commit();
}
- final long instant = date.getMillis();
+ final long instant = date.toInstant().toEpochMilli();
document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
document.add(new LongPoint(INSTANT_FIELD, instant));
indexWriter.addDocument(document);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java
index c1b9396664a22..2fbf60a3ddccb 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java
@@ -30,6 +30,7 @@
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
@@ -474,6 +475,6 @@ private void executeTestCase(boolean reduced, Query query, List<String> dataset,
}
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
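(A hedged sketch, not part of the patch, of the conversion the updated asLong(String) helpers perform; it assumes only the classes already imported in this diff, and the sample date is arbitrary.)

import java.time.ZonedDateTime;
import java.time.temporal.TemporalAccessor;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;

// Parse to a TemporalAccessor, convert to ZonedDateTime, then reduce to epoch millis.
TemporalAccessor parsed = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-01-01T00:00:00Z");
ZonedDateTime zdt = DateFormatters.toZonedDateTime(parsed);
long millis = zdt.toInstant().toEpochMilli(); // 1420070400000L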
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java
index 5148b0b85754f..c65b21ef72d32 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java
@@ -31,9 +31,10 @@
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
import org.elasticsearch.search.aggregations.BucketOrder;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;
@@ -137,7 +138,7 @@ private static Document documentForDate(String field, long millis) {
}
public void testRewriteTimeZone() throws IOException {
- DateFormatter format = DateFormatters.forPattern("strict_date_optional_time");
+ DateFormatter format = DateFormatter.forPattern("strict_date_optional_time");
try (Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig())) {
@@ -166,15 +167,15 @@ public void testRewriteTimeZone() throws IOException {
assertNull(builder.rewriteTimeZone(shardContextThatCrosses));
// fixed timeZone => no rewrite
- DateTimeZone tz = DateTimeZone.forOffsetHours(1);
+ ZoneId tz = ZoneOffset.ofHours(1);
builder.timeZone(tz);
assertSame(tz, builder.rewriteTimeZone(shardContextThatDoesntCross));
assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses));
// daylight-saving-times => rewrite if doesn't cross
- tz = DateTimeZone.forID("Europe/Paris");
+ tz = ZoneId.of("Europe/Paris");
builder.timeZone(tz);
- assertEquals(DateTimeZone.forOffsetHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross));
+ assertEquals(ZoneOffset.ofHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross));
assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses));
// Rounded values are no longer all within the same transitions => no rewrite
@@ -187,7 +188,7 @@ public void testRewriteTimeZone() throws IOException {
builder.timeZone(tz);
builder.interval(1000L * 60 * 60 * 24); // ~ 1 day
- assertEquals(DateTimeZone.forOffsetHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross));
+ assertEquals(ZoneOffset.ofHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross));
assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses));
// Because the interval is large, rounded values are not
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java
index 486f78778c452..0980bb7cf97ec 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java
@@ -36,13 +36,9 @@
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTimeZone;
-import org.joda.time.Instant;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.format.ISODateTimeFormat;
import java.io.IOException;
-import java.util.Locale;
+import java.time.ZoneOffset;
import static java.lang.Math.max;
import static java.lang.Math.min;
@@ -66,17 +62,19 @@ public static ExtendedBounds randomExtendedBounds() {
* Construct a random {@link ExtendedBounds} in pre-parsed form.
*/
public static ExtendedBounds randomParsedExtendedBounds() {
+ long maxDateValue = 253402300799999L; // end of year 9999
+ long minDateValue = -377705116800000L; // beginning of year -9999
if (randomBoolean()) {
// Construct with one missing bound
if (randomBoolean()) {
- return new ExtendedBounds(null, randomLong());
+ return new ExtendedBounds(null, maxDateValue);
}
- return new ExtendedBounds(randomLong(), null);
+ return new ExtendedBounds(minDateValue, null);
}
- long a = randomLong();
+ long a = randomLongBetween(minDateValue, maxDateValue);
long b;
do {
- b = randomLong();
+ b = randomLongBetween(minDateValue, maxDateValue);
} while (a == b);
long min = min(a, b);
long max = max(a, b);
@@ -88,9 +86,9 @@ public static ExtendedBounds randomParsedExtendedBounds() {
*/
public static ExtendedBounds unparsed(ExtendedBounds template) {
// It'd probably be better to randomize the formatter
- DateTimeFormatter formatter = ISODateTimeFormat.dateTime().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC);
- String minAsStr = template.getMin() == null ? null : formatter.print(new Instant(template.getMin()));
- String maxAsStr = template.getMax() == null ? null : formatter.print(new Instant(template.getMax()));
+ DateFormatter formatter = DateFormatter.forPattern("strict_date_time").withZone(ZoneOffset.UTC);
+ String minAsStr = template.getMin() == null ? null : formatter.formatMillis(template.getMin());
+ String maxAsStr = template.getMax() == null ? null : formatter.formatMillis(template.getMax());
return new ExtendedBounds(minAsStr, maxAsStr);
}
@@ -104,7 +102,7 @@ public void testParseAndValidate() {
null, xContentRegistry(), writableRegistry(), null, null, () -> now, null);
when(context.getQueryShardContext()).thenReturn(qsc);
DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime");
- DocValueFormat format = new DocValueFormat.DateTime(formatter, DateTimeZone.UTC);
+ DocValueFormat format = new DocValueFormat.DateTime(formatter, ZoneOffset.UTC);
ExtendedBounds expected = randomParsedExtendedBounds();
ExtendedBounds parsed = unparsed(expected).parseAndValidate("test", context, format);
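(A short sketch, not part of the patch, mirroring the unparsed(...) change above: raw epoch millis are now formatted through the java.time-based DateFormatter instead of Joda's ISODateTimeFormat. Pattern name and zone are taken from the lines in this hunk.)

import java.time.ZoneOffset;
import org.elasticsearch.common.time.DateFormatter;

// Format epoch millis as a strict_date_time string in UTC.
DateFormatter formatter = DateFormatter.forPattern("strict_date_time").withZone(ZoneOffset.UTC);
String minAsStr = formatter.formatMillis(0L); // epoch start rendered in the strict_date_time pattern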
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
index dd3425c20f43c..fe5c967f54be8 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
@@ -19,8 +19,8 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
@@ -28,12 +28,12 @@
import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram.BucketInfo;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.time.Instant;
import java.time.OffsetDateTime;
+import java.time.ZoneId;
import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -89,16 +89,16 @@ protected InternalAutoDateHistogram createTestInstance(String name,
*/
public void testGetAppropriateRoundingUsesCorrectIntervals() {
RoundingInfo[] roundings = new RoundingInfo[6];
- DateTimeZone timeZone = DateTimeZone.UTC;
+ ZoneId timeZone = ZoneOffset.UTC;
// Since we pass 0 as the starting index to getAppropriateRounding, we'll also use
// an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval()
// will be larger than the estimate.
- roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
- 1000L, "s", 1000);
- roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
- 60 * 1000L, "m", 1, 5, 10, 30);
- roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
- 60 * 60 * 1000L, "h", 1, 3, 12);
+ roundings[0] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone),
+ 1000L, "s",1000);
+ roundings[1] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone),
+ 60 * 1000L, "m",1, 5, 10, 30);
+ roundings[2] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone),
+ 60 * 60 * 1000L, "h",1, 3, 12);
OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC);
// We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function
@@ -117,7 +117,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
if (roundedBucketKey >= keyForBucket
&& roundedBucketKey < keyForBucket + intervalInMillis) {
@@ -194,7 +194,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
Map<Long, Long> actualCounts = new TreeMap<>();
for (Histogram.Bucket bucket : reduced.getBuckets()) {
- actualCounts.compute(((DateTime) bucket.getKey()).getMillis(),
+ actualCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(),
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
}
assertEquals(expectedCounts, actualCounts);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
index dd5d06f8785f7..961a05a7c40fd 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
@@ -19,8 +19,8 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.BucketOrder;
@@ -28,8 +28,8 @@
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
-import org.joda.time.DateTime;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -104,7 +104,7 @@ protected void assertReduced(InternalDateHistogram reduced, List<InternalDateHistogram> inputs) {
Map<Long, Long> expectedCounts = new TreeMap<>();
for (Histogram histogram : inputs) {
for (Histogram.Bucket bucket : histogram.getBuckets()) {
- expectedCounts.compute(((DateTime) bucket.getKey()).getMillis(),
+ expectedCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(),
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
}
}
@@ -139,7 +139,7 @@ protected void assertReduced(InternalDateHistogram reduced, List<InternalDateHistogram> inputs) {
Map<Long, Long> actualCounts = new TreeMap<>();
for (Histogram.Bucket bucket : reduced.getBuckets()) {
- actualCounts.compute(((DateTime) bucket.getKey()).getMillis(),
+ actualCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(),
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
}
assertEquals(expectedCounts, actualCounts);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java
index d0027208b104b..78c4d18218eea 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java
@@ -41,9 +41,9 @@
import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregator;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Collections;
import java.util.function.Consumer;
@@ -260,7 +260,7 @@ public void testWeightSetTimezone() throws IOException {
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build();
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder()
.setFieldName("weight_field")
- .setTimeZone(DateTimeZone.UTC)
+ .setTimeZone(ZoneOffset.UTC)
.build();
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
.value(valueConfig)
@@ -283,7 +283,7 @@ public void testWeightSetTimezone() throws IOException {
public void testValueSetTimezone() throws IOException {
MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder()
.setFieldName("value_field")
- .setTimeZone(DateTimeZone.UTC)
+ .setTimeZone(ZoneOffset.UTC)
.build();
MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build();
WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name")
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java
index 59af941812175..3ed1a15603e84 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java
@@ -28,6 +28,7 @@
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -40,9 +41,6 @@
import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.InternalAvg;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregator;
import java.io.IOException;
import java.util.ArrayList;
@@ -141,8 +139,7 @@ public void testSameAggNames() throws IOException {
}
}
-
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
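The asLong() helpers above go through Elasticsearch's DateFieldMapper/DateFormatters; a self-contained approximation using only JDK java.time, assuming an ISO local date-time interpreted as UTC (the class name is invented for the sketch):

    import java.time.LocalDateTime;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    class AsLongSketch {
        // Parse a timestamp string and reduce it to epoch millis, analogous to the rewritten asLong().
        static long asLong(String dateTime) {
            return LocalDateTime.parse(dateTime, DateTimeFormatter.ISO_LOCAL_DATE_TIME)
                .toInstant(ZoneOffset.UTC)
                .toEpochMilli();
        }

        public static void main(String[] args) {
            System.out.println(asLong("2017-01-01T00:00:00")); // 1483228800000
        }
    }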
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java
index 7cb4371354c3b..22a4fdbdf67bf 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java
@@ -31,9 +31,9 @@
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
import java.io.IOException;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -132,10 +132,10 @@ public void testEmptyBucketSort() {
assertThat(histogram, notNullValue());
// These become our baseline
List<? extends Histogram.Bucket> timeBuckets = histogram.getBuckets();
- DateTime previousKey = (DateTime) timeBuckets.get(0).getKey();
+ ZonedDateTime previousKey = (ZonedDateTime) timeBuckets.get(0).getKey();
for (Histogram.Bucket timeBucket : timeBuckets) {
- assertThat(previousKey, lessThanOrEqualTo((DateTime) timeBucket.getKey()));
- previousKey = (DateTime) timeBucket.getKey();
+ assertThat(previousKey, lessThanOrEqualTo((ZonedDateTime) timeBucket.getKey()));
+ previousKey = (ZonedDateTime) timeBucket.getKey();
}
// Now let's test using size
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java
index b1eec2b0f48ca..4b23304e642c0 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java
@@ -31,6 +31,7 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.CheckedConsumer;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -365,6 +366,6 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume
}
private static long asLong(String dateTime) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis();
+ return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
index 62fe7a2a45a60..db1ee6ab18916 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
@@ -21,7 +21,8 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@@ -31,12 +32,14 @@
import org.elasticsearch.search.aggregations.support.AggregationPath;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matcher;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
import org.junit.After;
import java.io.IOException;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -60,19 +63,11 @@ public class DateDerivativeIT extends ESIntegTestCase {
private static final String IDX_DST_END = "idx_dst_end";
private static final String IDX_DST_KATHMANDU = "idx_dst_kathmandu";
- private DateTime date(int month, int day) {
- return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
+ private ZonedDateTime date(int month, int day) {
+ return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC);
}
- private DateTime date(String date) {
- return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date);
- }
-
- private static String format(DateTime date, String pattern) {
- return DateTimeFormat.forPattern(pattern).print(date);
- }
-
- private static IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception {
+ private static IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception {
return client().prepareIndex(idx, "type").setSource(
jsonBuilder().startObject().timeField("date", date).field("value", value).endObject());
}
@@ -124,27 +119,27 @@ public void testSingleValuedField() throws Exception {
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat((ZonedDateTime) bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, nullValue());
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat((ZonedDateTime) bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), equalTo(1d));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat((ZonedDateTime) bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
@@ -166,28 +161,28 @@ public void testSingleValuedFieldNormalised() throws Exception {
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(3));
- DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat(bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
Derivative docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, nullValue());
- key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat(bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), closeTo(1d, 0.00001));
assertThat(docCountDeriv.normalizedValue(), closeTo(1d / 31d, 0.00001));
- key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
+ key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
- assertThat((DateTime) bucket.getKey(), equalTo(key));
+ assertThat(bucket.getKey(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
@@ -202,11 +197,14 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep
createIndex(IDX_DST_START);
List<IndexRequestBuilder> builders = new ArrayList<>();
- DateTimeZone timezone = DateTimeZone.forID("CET");
- addNTimes(1, IDX_DST_START, new DateTime("2012-03-24T01:00:00", timezone), builders);
- addNTimes(2, IDX_DST_START, new DateTime("2012-03-25T01:00:00", timezone), builders); // day with dst shift, only 23h long
- addNTimes(3, IDX_DST_START, new DateTime("2012-03-26T01:00:00", timezone), builders);
- addNTimes(4, IDX_DST_START, new DateTime("2012-03-27T01:00:00", timezone), builders);
+ ZoneId timezone = ZoneId.of("CET");
+ DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone);
+ // epoch millis: 1332547200000
+ addNTimes(1, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-24T01:00:00")), builders);
+ // day with dst shift, only 23h long
+ addNTimes(2, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-25T01:00:00")), builders);
+ addNTimes(3, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-26T01:00:00")), builders);
+ addNTimes(4, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-27T01:00:00")), builders);
indexRandom(true, builders);
ensureSearchable();
@@ -225,11 +223,23 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(4));
- assertBucket(buckets.get(0), new DateTime("2012-03-24", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null);
- assertBucket(buckets.get(1), new DateTime("2012-03-25", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 24d);
+ DateFormatter dateFormatter = DateFormatter.forPattern("yyyy-MM-dd");
+ ZonedDateTime expectedKeyFirstBucket =
+ LocalDate.from(dateFormatter.parse("2012-03-24")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null);
+
+ ZonedDateTime expectedKeySecondBucket =
+ LocalDate.from(dateFormatter.parse("2012-03-25")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d);
+
// the following is normalized using a 23h bucket width
- assertBucket(buckets.get(2), new DateTime("2012-03-26", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 23d);
- assertBucket(buckets.get(3), new DateTime("2012-03-27", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 24d);
+ ZonedDateTime expectedKeyThirdBucket =
+ LocalDate.from(dateFormatter.parse("2012-03-26")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 23d);
+
+ ZonedDateTime expectedKeyFourthBucket =
+ LocalDate.from(dateFormatter.parse("2012-03-27")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d);
}
/**
@@ -237,13 +247,15 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep
*/
public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Exception {
createIndex(IDX_DST_END);
- DateTimeZone timezone = DateTimeZone.forID("CET");
+ ZoneId timezone = ZoneId.of("CET");
List<IndexRequestBuilder> builders = new ArrayList<>();
- addNTimes(1, IDX_DST_END, new DateTime("2012-10-27T01:00:00", timezone), builders);
- addNTimes(2, IDX_DST_END, new DateTime("2012-10-28T01:00:00", timezone), builders); // day with dst shift -1h, 25h long
- addNTimes(3, IDX_DST_END, new DateTime("2012-10-29T01:00:00", timezone), builders);
- addNTimes(4, IDX_DST_END, new DateTime("2012-10-30T01:00:00", timezone), builders);
+ DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone);
+ addNTimes(1, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-27T01:00:00")), builders);
+ // day with dst shift -1h, 25h long
+ addNTimes(2, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-28T01:00:00")), builders);
+ addNTimes(3, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-29T01:00:00")), builders);
+ addNTimes(4, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-30T01:00:00")), builders);
indexRandom(true, builders);
ensureSearchable();
@@ -262,11 +274,24 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Excepti
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(4));
- assertBucket(buckets.get(0), new DateTime("2012-10-27", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null);
- assertBucket(buckets.get(1), new DateTime("2012-10-28", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 24d);
+ DateFormatter dateFormatter = DateFormatter.forPattern("yyyy-MM-dd").withZone(ZoneOffset.UTC);
+
+ ZonedDateTime expectedKeyFirstBucket =
+ LocalDate.from(dateFormatter.parse("2012-10-27")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null);
+
+ ZonedDateTime expectedKeySecondBucket =
+ LocalDate.from(dateFormatter.parse("2012-10-28")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d);
+
// the following is normalized using a 25h bucket width
- assertBucket(buckets.get(2), new DateTime("2012-10-29", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 25d);
- assertBucket(buckets.get(3), new DateTime("2012-10-30", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 24d);
+ ZonedDateTime expectedKeyThirdBucket =
+ LocalDate.from(dateFormatter.parse("2012-10-29")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 25d);
+
+ ZonedDateTime expectedKeyFourthBucket =
+ LocalDate.from(dateFormatter.parse("2012-10-30")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d);
}
/**
@@ -275,14 +300,15 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Excepti
*/
public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exception {
createIndex(IDX_DST_KATHMANDU);
- DateTimeZone timezone = DateTimeZone.forID("Asia/Kathmandu");
+ ZoneId timezone = ZoneId.of("Asia/Kathmandu");
List<IndexRequestBuilder> builders = new ArrayList<>();
- addNTimes(1, IDX_DST_KATHMANDU, new DateTime("1985-12-31T22:30:00", timezone), builders);
+ DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone);
+ addNTimes(1, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1985-12-31T22:30:00")), builders);
// the shift happens during the next bucket, which includes the 45min that do not start on the full hour
- addNTimes(2, IDX_DST_KATHMANDU, new DateTime("1985-12-31T23:30:00", timezone), builders);
- addNTimes(3, IDX_DST_KATHMANDU, new DateTime("1986-01-01T01:30:00", timezone), builders);
- addNTimes(4, IDX_DST_KATHMANDU, new DateTime("1986-01-01T02:30:00", timezone), builders);
+ addNTimes(2, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1985-12-31T23:30:00")), builders);
+ addNTimes(3, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1986-01-01T01:30:00")), builders);
+ addNTimes(4, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1986-01-01T02:30:00")), builders);
indexRandom(true, builders);
ensureSearchable();
@@ -301,27 +327,36 @@ public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exce
List<? extends Bucket> buckets = deriv.getBuckets();
assertThat(buckets.size(), equalTo(4));
- assertBucket(buckets.get(0), new DateTime("1985-12-31T22:00:00", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null,
- null);
- assertBucket(buckets.get(1), new DateTime("1985-12-31T23:00:00", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d,
- 1d / 60d);
+ DateFormatter dateFormatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(ZoneOffset.UTC);
+
+ ZonedDateTime expectedKeyFirstBucket =
+ LocalDateTime.from(dateFormatter.parse("1985-12-31T22:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null);
+
+ ZonedDateTime expectedKeySecondBucket =
+ LocalDateTime.from(dateFormatter.parse("1985-12-31T23:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 60d);
+
// the following is normalized using a 105min bucket width
- assertBucket(buckets.get(2), new DateTime("1986-01-01T01:00:00", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d,
- 1d / 105d);
- assertBucket(buckets.get(3), new DateTime("1986-01-01T02:00:00", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d,
- 1d / 60d);
+ ZonedDateTime expectedKeyThirdBucket =
+ LocalDateTime.from(dateFormatter.parse("1986-01-01T01:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 105d);
+
+ ZonedDateTime expectedKeyFourthBucket =
+ LocalDateTime.from(dateFormatter.parse("1986-01-01T02:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC);
+ assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 60d);
}
- private static void addNTimes(int amount, String index, DateTime dateTime, List<IndexRequestBuilder> builders) throws Exception {
+ private static void addNTimes(int amount, String index, ZonedDateTime dateTime, List<IndexRequestBuilder> builders) throws Exception {
for (int i = 0; i < amount; i++) {
builders.add(indexDoc(index, dateTime, 1));
}
}
- private static void assertBucket(Histogram.Bucket bucket, DateTime expectedKey, long expectedDocCount,
+ private static void assertBucket(Histogram.Bucket bucket, ZonedDateTime expectedKey, long expectedDocCount,
Matcher
* @param field the name of the date field to use for the date histogram (required)
* @param interval the interval to use for the date histogram (required)
@@ -229,23 +228,14 @@ public static DateHistogramGroupConfig fromXContent(final XContentParser parser)
}
private static Rounding createRounding(final String expr, final String timeZone) {
- DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expr);
+ Rounding.DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expr);
final Rounding.Builder rounding;
if (timeUnit != null) {
rounding = new Rounding.Builder(timeUnit);
} else {
rounding = new Rounding.Builder(TimeValue.parseTimeValue(expr, "createRounding"));
}
- rounding.timeZone(toDateTimeZone(timeZone));
+ rounding.timeZone(ZoneId.of(timeZone));
return rounding.build();
}
-
- private static DateTimeZone toDateTimeZone(final String timezone) {
- try {
- return DateTimeZone.forOffsetHours(Integer.parseInt(timezone));
- } catch (NumberFormatException e) {
- return DateTimeZone.forID(timezone);
- }
- }
-
}
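For reference, the numeric-hour-offset fallback handled by the deleted Joda helper maps onto java.time as sketched below; this is illustration only, not part of the patch, and the class name is invented:

    import java.time.ZoneId;
    import java.time.ZoneOffset;

    class TimeZoneParsing {
        // Try a bare hour offset first (e.g. "5"), otherwise defer to ZoneId.of
        // (region IDs such as "UTC" and offsets such as "+01:00").
        static ZoneId toZoneId(String timezone) {
            try {
                return ZoneOffset.ofHours(Integer.parseInt(timezone));
            } catch (NumberFormatException e) {
                return ZoneId.of(timezone);
            }
        }

        public static void main(String[] args) {
            System.out.println(toZoneId("5"));      // +05:00
            System.out.println(toZoneId("UTC"));    // UTC
            System.out.println(toZoneId("+01:00")); // +01:00
        }
    }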
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java
index f81c7955abbc4..e67baeaad3916 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java
@@ -91,7 +91,7 @@ public static DateTime parseDateMathOrNull(String fieldName, XContentParser pars
}
public static DateTime parseDateMath(String valueString, DateTimeZone timeZone, final Clock clock) {
- return new DateTime(dateMathParser.parse(valueString, clock::millis), timeZone);
+ return new DateTime(dateMathParser.parse(valueString, clock::millis).toEpochMilli(), timeZone);
}
public static DateTime parseDate(String fieldName, XContentParser parser, DateTimeZone timeZone) throws IOException {
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java
index 6812aca474749..2219a78055544 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java
@@ -15,9 +15,9 @@
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneOffset;
public class RewriteCachingDirectoryReaderTests extends ESTestCase {
@@ -92,15 +92,15 @@ public void testIsWithinQuery() throws IOException {
dateFieldType.setName("test");
QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> 0);
MappedFieldType.Relation relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 0, 10,
- true, true, DateTimeZone.UTC, null, context);
+ true, true, ZoneOffset.UTC, null, context);
assertEquals(relation, MappedFieldType.Relation.WITHIN);
relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 3, 11,
- true, true, DateTimeZone.UTC, null, context);
+ true, true, ZoneOffset.UTC, null, context);
assertEquals(relation, MappedFieldType.Relation.INTERSECTS);
relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 10, 11,
- false, true, DateTimeZone.UTC, null, context);
+ false, true, ZoneOffset.UTC, null, context);
assertEquals(relation, MappedFieldType.Relation.DISJOINT);
}
}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java
index 230fd75fbb958..47dad7e18eb32 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java
@@ -29,6 +29,7 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
@@ -51,11 +52,11 @@ public class TestUtils {
private static final DateMathParser dateMathParser = formatDateTimeFormatter.toDateMathParser();
public static String dateMathString(String time, final long now) {
- return formatDateTimeFormatter.formatMillis(dateMathParser.parse(time, () -> now));
+ return formatDateTimeFormatter.format(dateMathParser.parse(time, () -> now).atZone(ZoneOffset.UTC));
}
public static long dateMath(String time, final long now) {
- return dateMathParser.parse(time, () -> now);
+ return dateMathParser.parse(time, () -> now).toEpochMilli();
}
public static LicenseSpec generateRandomLicenseSpec(int version) {
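The java.time date-math parser hands back an Instant, so callers choose between epoch millis and a formatted string, as the two helpers above do. A standalone JDK-only sketch; the Instant value is a stand-in for the parser result and the class name is invented:

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    class InstantUsage {
        public static void main(String[] args) {
            Instant parsed = Instant.ofEpochMilli(1483228800000L); // stand-in for dateMathParser.parse(...)
            long epochMillis = parsed.toEpochMilli();
            String formatted = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(parsed.atZone(ZoneOffset.UTC));
            System.out.println(epochMillis); // 1483228800000
            System.out.println(formatted);   // 2017-01-01T00:00:00Z
        }
    }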
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java
index cb2f13e804253..788870013885e 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java
@@ -48,15 +48,15 @@
import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig.Mode;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.utils.ToXContentParams;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.TimeZone;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -449,7 +449,7 @@ public void testBuild_GivenHistogramWithDefaultInterval() {
public void testBuild_GivenDateHistogramWithInvalidTimeZone() {
MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time")
- .interval(300000L).timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("EST"))).subAggregation(maxTime);
+ .interval(300000L).timeZone(ZoneId.of("CET")).subAggregation(maxTime);
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> createDatafeedWithDateHistogram(dateHistogram));
@@ -650,7 +650,7 @@ public void testSerializationOfComplexAggs() throws IOException {
new Script("params.bytes > 0 ? params.bytes : null"));
DateHistogramAggregationBuilder dateHistogram =
AggregationBuilders.dateHistogram("histogram_buckets")
- .field("timestamp").interval(300000).timeZone(DateTimeZone.UTC)
+ .field("timestamp").interval(300000).timeZone(ZoneOffset.UTC)
.subAggregation(maxTime)
.subAggregation(avgAggregationBuilder)
.subAggregation(derivativePipelineAggregationBuilder)
@@ -701,7 +701,7 @@ public void testSerializationOfComplexAggsBetweenVersions() throws IOException {
new Script("params.bytes > 0 ? params.bytes : null"));
DateHistogramAggregationBuilder dateHistogram =
AggregationBuilders.dateHistogram("histogram_buckets")
- .field("timestamp").interval(300000).timeZone(DateTimeZone.UTC)
+ .field("timestamp").interval(300000).timeZone(ZoneOffset.UTC)
.subAggregation(maxTime)
.subAggregation(avgAggregationBuilder)
.subAggregation(derivativePipelineAggregationBuilder)
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java
index 7770def0fae9a..532468216e5aa 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java
@@ -14,11 +14,12 @@
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTimeZone;
-import java.util.TimeZone;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
public class ExtractorUtilsTests extends ESTestCase {
@@ -73,13 +74,21 @@ public void testGetHistogramAggregation_MissingHistogramAgg() {
public void testGetHistogramIntervalMillis_GivenDateHistogramWithInvalidTimeZone() {
MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time")
- .interval(300000L).timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("EST"))).subAggregation(maxTime);
+ .interval(300000L).timeZone(ZoneId.of("CET")).subAggregation(maxTime);
ElasticsearchException e = expectThrows(ElasticsearchException.class,
() -> ExtractorUtils.getHistogramIntervalMillis(dateHistogram));
assertThat(e.getMessage(), equalTo("ML requires date_histogram.time_zone to be UTC"));
}
+ public void testGetHistogramIntervalMillis_GivenUtcTimeZones() {
+ MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
+ ZoneId zone = randomFrom(ZoneOffset.UTC, ZoneId.of("UTC"));
+ DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time")
+ .interval(300000L).timeZone(zone).subAggregation(maxTime);
+ assertThat(ExtractorUtils.getHistogramIntervalMillis(dateHistogram), is(300_000L));
+ }
+
public void testIsHistogram() {
assertTrue(ExtractorUtils.isHistogram(AggregationBuilders.dateHistogram("time")));
assertTrue(ExtractorUtils.isHistogram(AggregationBuilders.histogram("time")));
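The new UTC test draws from both ZoneOffset.UTC and ZoneId.of("UTC") because the two are distinct java.time values with different IDs; a small JDK-only illustration (class name invented):

    import java.time.ZoneId;
    import java.time.ZoneOffset;

    class UtcZoneIds {
        public static void main(String[] args) {
            ZoneId region = ZoneId.of("UTC");
            System.out.println(ZoneOffset.UTC.getId());                      // Z
            System.out.println(region.getId());                              // UTC
            System.out.println(ZoneOffset.UTC.equals(region));               // false
            System.out.println(region.normalized().equals(ZoneOffset.UTC));  // true
        }
    }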
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
index d892eb550a17a..605ea6e901a90 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
@@ -28,7 +28,7 @@
import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween;
import static com.carrotsearch.randomizedtesting.generators.RandomPicks.randomFrom;
import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween;
-import static org.elasticsearch.test.ESTestCase.randomDateTimeZone;
+import static org.elasticsearch.test.ESTestCase.randomZone;
public class ConfigTestHelpers {
@@ -71,7 +71,7 @@ public static DateHistogramGroupConfig randomDateHistogramGroupConfig(final Rand
final String field = randomField(random);
final DateHistogramInterval interval = randomInterval();
final DateHistogramInterval delay = random.nextBoolean() ? randomInterval() : null;
- final String timezone = random.nextBoolean() ? randomDateTimeZone().toString() : null;
+ String timezone = random.nextBoolean() ? randomZone().getId() : null;
return new DateHistogramGroupConfig(field, interval, delay, timezone);
}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java
index 415e1a00a60cf..95df682ff5e14 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java
@@ -14,9 +14,9 @@
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -155,28 +155,28 @@ public void testBwcSerialization() throws IOException {
DateHistogramInterval interval = new DateHistogramInterval(in);
String field = in.readString();
DateHistogramInterval delay = in.readOptionalWriteable(DateHistogramInterval::new);
- DateTimeZone timeZone = in.readTimeZone();
+ ZoneId timeZone = in.readZoneId();
- assertEqualInstances(reference, new DateHistogramGroupConfig(field, interval, delay, timeZone.getID()));
+ assertEqualInstances(reference, new DateHistogramGroupConfig(field, interval, delay, timeZone.getId()));
}
for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
final String field = ConfigTestHelpers.randomField(random());
final DateHistogramInterval interval = ConfigTestHelpers.randomInterval();
final DateHistogramInterval delay = randomBoolean() ? ConfigTestHelpers.randomInterval() : null;
- final DateTimeZone timezone = randomDateTimeZone();
+ final ZoneId timezone = randomZone();
// previous way to serialize a DateHistogramGroupConfig
final BytesStreamOutput out = new BytesStreamOutput();
interval.writeTo(out);
out.writeString(field);
out.writeOptionalWriteable(delay);
- out.writeTimeZone(timezone);
+ out.writeZoneId(timezone);
final StreamInput in = out.bytes().streamInput();
DateHistogramGroupConfig deserialized = new DateHistogramGroupConfig(in);
- assertEqualInstances(new DateHistogramGroupConfig(field, interval, delay, timezone.getID()), deserialized);
+ assertEqualInstances(new DateHistogramGroupConfig(field, interval, delay, timezone.getId()), deserialized);
}
}
}
diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java
index 1453f59ed43e4..454a3eb06e5a7 100644
--- a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java
+++ b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java
@@ -13,8 +13,10 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.ml.MachineLearning;
-import org.joda.time.DateTime;
+import java.time.Clock;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
@@ -266,7 +268,7 @@ public void testHRDSplit() throws Exception {
"\"time\": { \"type\": \"date\" } }");
// Index some data
- DateTime baseTime = new DateTime().minusYears(1);
+ ZonedDateTime baseTime = ZonedDateTime.now(Clock.systemDefaultZone()).minusYears(1);
TestConfiguration test = tests.get(randomInt(tests.size()-1));
// domainSplit() tests had subdomain, testHighestRegisteredDomainCases() did not, so we need a special case for sub
@@ -276,18 +278,20 @@ public void testHRDSplit() throws Exception {
for (int i = 0; i < 100; i++) {
- DateTime time = baseTime.plusHours(i);
+ ZonedDateTime time = baseTime.plusHours(i);
if (i == 64) {
// Anomaly has 100 docs, but we don't care about the value
for (int j = 0; j < 100; j++) {
- Request createDocRequest = new Request("PUT", "/painless/_doc/" + time.toDateTimeISO() + "_" + j);
- createDocRequest.setJsonEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.toDateTimeISO() + "\"}");
+ String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME);
+ Request createDocRequest = new Request("PUT", "/painless/_doc/" + formattedTime + "_" + j);
+ createDocRequest.setJsonEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + formattedTime + "\"}");
client().performRequest(createDocRequest);
}
} else {
// Non-anomalous values will be what's seen when the anomaly is reported
- Request createDocRequest = new Request("PUT", "/painless/_doc/" + time.toDateTimeISO());
- createDocRequest.setJsonEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.toDateTimeISO() + "\"}");
+ String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME);
+ Request createDocRequest = new Request("PUT", "/painless/_doc/" + formattedTime);
+ createDocRequest.setJsonEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + formattedTime + "\"}");
client().performRequest(createDocRequest);
}
}
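The rewritten loop derives both the doc ID and the indexed time field from DateTimeFormatter.ISO_DATE_TIME; for a zoned value that format carries the offset and, for region zones, the bracketed zone ID. A JDK-only sketch with a fixed zone (class name invented, not part of the patch):

    import java.time.ZoneId;
    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;

    class IsoDateTimeFormatting {
        public static void main(String[] args) {
            ZonedDateTime time = ZonedDateTime.of(2018, 3, 5, 10, 15, 30, 0, ZoneId.of("Europe/Paris"));
            System.out.println(time.format(DateTimeFormatter.ISO_DATE_TIME));
            // 2018-03-05T10:15:30+01:00[Europe/Paris]
        }
    }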
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java
index 190933b1e9316..5b9852ba4fddc 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java
@@ -16,9 +16,9 @@
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.action.DeleteExpiredDataAction;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
+import java.time.Clock;
+import java.time.ZonedDateTime;
import java.util.Objects;
import java.util.Random;
import java.util.concurrent.ScheduledFuture;
@@ -70,9 +70,14 @@ public MlDailyMaintenanceService(ClusterName clusterName, ThreadPool threadPool,
private static TimeValue delayToNextTime(ClusterName clusterName) {
Random random = new Random(clusterName.hashCode());
int minutesOffset = random.ints(0, MAX_TIME_OFFSET_MINUTES).findFirst().getAsInt();
- DateTime now = DateTime.now(ISOChronology.getInstance());
- DateTime next = now.plusDays(1).withTimeAtStartOfDay().plusMinutes(30).plusMinutes(minutesOffset);
- return TimeValue.timeValueMillis(next.getMillis() - now.getMillis());
+
+ ZonedDateTime now = ZonedDateTime.now(Clock.systemDefaultZone());
+ ZonedDateTime next = now.plusDays(1)
+ .toLocalDate()
+ .atStartOfDay(now.getZone())
+ .plusMinutes(30)
+ .plusMinutes(minutesOffset);
+ return TimeValue.timeValueMillis(next.toInstant().toEpochMilli() - now.toInstant().toEpochMilli());
}
public void start() {
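The scheduling arithmetic above, restated as a standalone sketch that returns a java.time Duration instead of an Elasticsearch TimeValue; names are invented and behaviour is assumed equivalent:

    import java.time.Clock;
    import java.time.Duration;
    import java.time.ZonedDateTime;

    class MaintenanceDelay {
        // Time until 00:30 of the following day in the local zone, plus a per-cluster minute offset.
        static Duration delayToNextTime(int minutesOffset, Clock clock) {
            ZonedDateTime now = ZonedDateTime.now(clock);
            ZonedDateTime next = now.plusDays(1)
                .toLocalDate()
                .atStartOfDay(now.getZone())
                .plusMinutes(30)
                .plusMinutes(minutesOffset);
            return Duration.between(now, next);
        }

        public static void main(String[] args) {
            System.out.println(delayToNextTime(17, Clock.systemDefaultZone()));
        }
    }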
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java
index 35878f1199586..85f2489e6b0e5 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java
@@ -128,7 +128,6 @@ Long runLookBack(long startTime, Long endTime) throws Exception {
auditor.info(jobId, msg);
LOGGER.info("[{}] {}", jobId, msg);
-
FlushJobAction.Request request = new FlushJobAction.Request(jobId);
request.setCalcInterim(true);
run(lookbackStartTimeMs, lookbackEnd, request);
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java
index 86fe439ac16cb..f8fa3b1874808 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java
@@ -17,11 +17,11 @@
import org.elasticsearch.xpack.core.ml.action.GetBucketsAction;
import org.elasticsearch.xpack.core.ml.action.util.PageParams;
import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils;
-import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData;
import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.elasticsearch.xpack.core.ml.utils.Intervals;
-import org.joda.time.DateTime;
+import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData;
+import java.time.ZonedDateTime;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@@ -131,8 +131,8 @@ private Map<Long, Long> checkCurrentBucketEventCount(long start, long end) {
}
private static long toHistogramKeyToEpoch(Object key) {
- if (key instanceof DateTime) {
- return ((DateTime)key).getMillis();
+ if (key instanceof ZonedDateTime) {
+ return ((ZonedDateTime)key).toInstant().toEpochMilli();
} else if (key instanceof Double) {
return ((Double)key).longValue();
} else if (key instanceof Long){
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java
index db8dea22675f2..8cf3ed39651d6 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java
@@ -21,10 +21,10 @@
import org.elasticsearch.search.aggregations.metrics.Percentiles;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
-import org.joda.time.DateTime;
import java.io.IOException;
import java.io.OutputStream;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@@ -210,15 +210,15 @@ private void processDateHistogram(Histogram agg) throws IOException {
}
/*
- * Date Histograms have a {@link DateTime} object as the key,
+ * Date Histograms have a {@link ZonedDateTime} object as the key,
* Histograms have either a Double or Long.
*/
private long toHistogramKeyToEpoch(Object key) {
- if (key instanceof DateTime) {
- return ((DateTime)key).getMillis();
+ if (key instanceof ZonedDateTime) {
+ return ((ZonedDateTime)key).toInstant().toEpochMilli();
} else if (key instanceof Double) {
return ((Double)key).longValue();
- } else if (key instanceof Long){
+ } else if (key instanceof Long) {
return (Long)key;
} else {
throw new IllegalStateException("Histogram key [" + key + "] cannot be converted to a timestamp");
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java
index 232cd53a359ce..4223bff49825e 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java
@@ -8,7 +8,6 @@
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
-import org.joda.time.base.BaseDateTime;
import java.util.List;
import java.util.Map;
@@ -112,8 +111,6 @@ public Object[] value(SearchHit hit) {
}
if (value[0] instanceof String) { // doc_value field with the epoch_millis format
value[0] = Long.parseLong((String) value[0]);
- } else if (value[0] instanceof BaseDateTime) { // script field
- value[0] = ((BaseDateTime) value[0]).getMillis();
} else if (value[0] instanceof Long == false) { // pre-6.0 field
throw new IllegalStateException("Unexpected value for a time field: " + value[0].getClass());
}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
index 204ae42720433..dd9a6229ec887 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java
@@ -14,8 +14,8 @@
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.results.OverallBucket;
import org.elasticsearch.xpack.core.ml.job.results.Result;
-import org.joda.time.DateTime;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -64,8 +64,8 @@ public List<OverallBucket> computeOverallBuckets(Histogram histogram) {
}
private static Date getHistogramBucketTimestamp(Histogram.Bucket bucket) {
- DateTime bucketTimestamp = (DateTime) bucket.getKey();
- return new Date(bucketTimestamp.getMillis());
+ ZonedDateTime bucketTimestamp = (ZonedDateTime) bucket.getKey();
+ return new Date(bucketTimestamp.toInstant().toEpochMilli());
}
static class TopNScores extends PriorityQueue<Double> {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java
index b595c564ab9aa..be50114fc46e0 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java
@@ -15,9 +15,9 @@
import org.elasticsearch.xpack.core.ml.job.results.Result;
import org.elasticsearch.xpack.ml.job.persistence.BatchedJobsIterator;
import org.elasticsearch.xpack.ml.utils.VolatileCursorIterator;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
+import java.time.Clock;
+import java.time.Instant;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
@@ -71,7 +71,7 @@ private WrappedBatchedJobsIterator newJobIterator() {
}
private long calcCutoffEpochMs(long retentionDays) {
- long nowEpochMs = DateTime.now(ISOChronology.getInstance()).getMillis();
+ long nowEpochMs = Instant.now(Clock.systemDefaultZone()).toEpochMilli();
return nowEpochMs - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis();
}
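Instant.now accepts a Clock, which makes this kind of cutoff computation easy to pin in tests via Clock.fixed. A sketch of the same arithmetic written against an injectable Clock; names and values are invented for illustration:

    import java.time.Clock;
    import java.time.Duration;
    import java.time.Instant;
    import java.time.ZoneOffset;

    class CutoffSketch {
        // Epoch-millis cutoff: "now" minus the retention period.
        static long calcCutoffEpochMs(long retentionDays, Clock clock) {
            long nowEpochMs = Instant.now(clock).toEpochMilli();
            return nowEpochMs - Duration.ofDays(retentionDays).toMillis();
        }

        public static void main(String[] args) {
            Clock fixed = Clock.fixed(Instant.ofEpochMilli(1_000_000_000_000L), ZoneOffset.UTC);
            System.out.println(calcCutoffEpochMs(7, fixed)); // 999395200000
        }
    }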
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java
index 3de9795deb335..3225a7eb9212e 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java
@@ -35,11 +35,11 @@
import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats;
import org.elasticsearch.xpack.core.ml.job.results.Result;
import org.elasticsearch.xpack.ml.MachineLearning;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
import java.io.InputStream;
+import java.time.Clock;
+import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
@@ -66,7 +66,7 @@ public class ExpiredForecastsRemover implements MlDataRemover {
public ExpiredForecastsRemover(Client client, ThreadPool threadPool) {
this.client = Objects.requireNonNull(client);
this.threadPool = Objects.requireNonNull(threadPool);
- this.cutoffEpochMs = DateTime.now(ISOChronology.getInstance()).getMillis();
+ this.cutoffEpochMs = Instant.now(Clock.systemDefaultZone()).toEpochMilli();
}
@Override
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java
index 1e5e6fa652db1..ad999daafb254 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java
@@ -8,9 +8,7 @@
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedField;
import org.elasticsearch.xpack.ml.test.SearchHitBuilder;
-import org.joda.time.DateTime;
import java.util.Arrays;
@@ -98,16 +96,16 @@ public void testNewTimeFieldGivenSource() {
expectThrows(IllegalArgumentException.class, () -> ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.SOURCE));
}
- public void testValueGivenTimeField() {
+ public void testValueGivenStringTimeField() {
final long millis = randomLong();
- final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new DateTime(millis)).build();
+ final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build();
final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE);
assertThat(timeField.value(hit), equalTo(new Object[] { millis }));
}
- public void testValueGivenStringTimeField() {
+ public void testValueGivenLongTimeField() {
final long millis = randomLong();
- final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build();
+ final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build();
final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE);
assertThat(timeField.value(hit), equalTo(new Object[] { millis }));
}
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java
index 1fd6db3de566a..20dd49029b3ea 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java
@@ -17,7 +17,6 @@
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.ml.test.SearchHitBuilder;
-import org.joda.time.DateTime;
import java.util.Arrays;
import java.util.Collections;
@@ -64,13 +63,6 @@ public void testAllTypesOfFields() {
assertThat(extractedFields.getSourceFields(), equalTo(new String[] {"src1", "src2"}));
}
- public void testTimeFieldValue() {
- long millis = randomLong();
- SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new DateTime(millis)).build();
- TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Collections.singletonList(timeField));
- assertThat(extractedFields.timeFieldValue(hit), equalTo(millis));
- }
-
public void testStringTimeFieldValue() {
long millis = randomLong();
SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build();
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java
index 6e256680eca55..f6f75fe722dac 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java
@@ -6,7 +6,7 @@
package org.elasticsearch.xpack.ml.filestructurefinder;
import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.time.DateFormatters;
+import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch;
import java.util.Arrays;
@@ -316,7 +316,7 @@ private void validateJavaTimestampFormats(List<String> javaTimestampFormats, Str
String timestampFormat = javaTimestampFormats.get(i);
switch (timestampFormat) {
case "ISO8601":
- parsed = DateFormatters.forPattern("strict_date_optional_time_nanos").withZone(defaultZone).parse(text);
+ parsed = DateFormatter.forPattern("strict_date_optional_time_nanos").withZone(defaultZone).parse(text);
break;
default:
java.time.format.DateTimeFormatter parser = new java.time.format.DateTimeFormatterBuilder()
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java
index 505a2b871da0b..ee331a99006ed 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java
@@ -289,11 +289,16 @@ private Bucket createBucket(boolean isInterim) {
return bucket;
}
+ private Date randomDate() {
+ // between 1970 and 2065
+ return new Date(randomLongBetween(0, 3000000000000L));
+ }
+
private List<AnomalyRecord> createRecords(boolean isInterim) {
List<AnomalyRecord> records = new ArrayList<>();
int count = randomIntBetween(0, 100);
- Date now = new Date(randomNonNegativeLong());
+ Date now = randomDate();
for (int i=0; i<count; i++) {
@@ -48,7 +47,7 @@ protected AutodetectResult createTestInstance() {
FlushAcknowledgement flushAcknowledgement;
String jobId = "foo";
if (randomBoolean()) {
- bucket = new Bucket(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong());
+ bucket = new Bucket(jobId, randomDate(), randomNonNegativeLong());
} else {
bucket = null;
}
@@ -56,7 +55,7 @@ protected AutodetectResult createTestInstance() {
int size = randomInt(10);
records = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
- AnomalyRecord record = new AnomalyRecord(jobId, new Date(randomLong()), randomNonNegativeLong());
+ AnomalyRecord record = new AnomalyRecord(jobId, randomDate(), randomNonNegativeLong());
record.setProbability(randomDoubleBetween(0.0, 1.0, true));
records.add(record);
}
@@ -67,7 +66,7 @@ protected AutodetectResult createTestInstance() {
influencers = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
Influencer influencer = new Influencer(jobId, randomAlphaOfLength(10), randomAlphaOfLength(10),
- new Date(randomNonNegativeLong()), randomNonNegativeLong());
+ randomDate(), randomNonNegativeLong());
influencer.setProbability(randomDoubleBetween(0.0, 1.0, true));
influencers.add(influencer);
}
@@ -89,12 +88,13 @@ protected AutodetectResult createTestInstance() {
modelSizeStats = null;
}
if (randomBoolean()) {
- modelPlot = new ModelPlot(jobId, new Date(randomLong()), randomNonNegativeLong(), randomInt());
+ modelPlot = new ModelPlot(jobId, randomDate(), randomNonNegativeLong(), randomInt());
} else {
modelPlot = null;
}
if (randomBoolean()) {
- forecast = new Forecast(jobId, randomAlphaOfLength(20), new Date(randomLong()), randomNonNegativeLong(), randomInt());
+ forecast = new Forecast(jobId, randomAlphaOfLength(20), randomDate(),
+ randomNonNegativeLong(), randomInt());
} else {
forecast = null;
}
@@ -110,7 +110,8 @@ protected AutodetectResult createTestInstance() {
categoryDefinition = null;
}
if (randomBoolean()) {
- flushAcknowledgement = new FlushAcknowledgement(randomAlphaOfLengthBetween(1, 20), new Date(randomNonNegativeLong()));
+ flushAcknowledgement = new FlushAcknowledgement(randomAlphaOfLengthBetween(1, 20),
+ randomDate());
} else {
flushAcknowledgement = null;
}
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/BucketTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/BucketTests.java
index 65343b0a068ac..a49ef0a5e26fa 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/BucketTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/BucketTests.java
@@ -33,7 +33,7 @@ public Bucket createTestInstance() {
}
public Bucket createTestInstance(String jobId) {
- Bucket bucket = new Bucket(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong());
+ Bucket bucket = new Bucket(jobId, randomDate(), randomNonNegativeLong());
if (randomBoolean()) {
bucket.setAnomalyScore(randomDouble());
}
@@ -92,7 +92,7 @@ protected Bucket doParseInstance(XContentParser parser) {
}
public void testEquals_GivenDifferentClass() {
- Bucket bucket = new Bucket("foo", new Date(randomLong()), randomNonNegativeLong());
+ Bucket bucket = new Bucket("foo", randomDate(), randomNonNegativeLong());
assertFalse(bucket.equals("a string"));
}
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ForecastTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ForecastTests.java
index b1d9f37dcb4f2..a5c15716ea293 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ForecastTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ForecastTests.java
@@ -26,7 +26,7 @@ protected Forecast createTestInstance() {
public Forecast createTestInstance(String jobId) {
Forecast forecast =
- new Forecast(jobId, randomAlphaOfLength(20), new Date(randomLong()),
+ new Forecast(jobId, randomAlphaOfLength(20), randomDate(),
randomNonNegativeLong(), randomInt());
if (randomBoolean()) {
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ModelPlotTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ModelPlotTests.java
index 2a5ceb8363b8a..37788bfa203d2 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ModelPlotTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ModelPlotTests.java
@@ -30,7 +30,7 @@ protected ModelPlot createTestInstance() {
public ModelPlot createTestInstance(String jobId) {
ModelPlot modelPlot =
- new ModelPlot(jobId, new Date(randomLong()), randomNonNegativeLong(), randomInt());
+ new ModelPlot(jobId, randomDate(), randomNonNegativeLong(), randomInt());
if (randomBoolean()) {
modelPlot.setByFieldName(randomAlphaOfLengthBetween(1, 20));
}
@@ -73,14 +73,16 @@ protected ModelPlot doParseInstance(XContentParser parser) {
public void testEquals_GivenSameObject() {
ModelPlot modelPlot =
- new ModelPlot(randomAlphaOfLength(15), new Date(randomLong()), randomNonNegativeLong(), randomInt());
+ new ModelPlot(randomAlphaOfLength(15),
+ randomDate(), randomNonNegativeLong(), randomInt());
assertTrue(modelPlot.equals(modelPlot));
}
public void testEquals_GivenObjectOfDifferentClass() {
ModelPlot modelPlot =
- new ModelPlot(randomAlphaOfLength(15), new Date(randomLong()), randomNonNegativeLong(), randomInt());
+ new ModelPlot(randomAlphaOfLength(15),
+ randomDate(), randomNonNegativeLong(), randomInt());
assertFalse(modelPlot.equals("a string"));
}
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/OverallBucketTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/OverallBucketTests.java
index 42b29cb5ee224..b6c0a99685d0b 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/OverallBucketTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/OverallBucketTests.java
@@ -26,7 +26,7 @@ protected OverallBucket createTestInstance() {
for (int i = 0; i < jobCount; ++i) {
jobs.add(new OverallBucket.JobInfo(JobTests.randomValidJobId(), randomDoubleBetween(0.0, 100.0, true)));
}
- return new OverallBucket(new Date(randomNonNegativeLong()),
+ return new OverallBucket(new Date(randomLongBetween(0, 3000000000000L)),
randomIntBetween(60, 24 * 3600),
randomDoubleBetween(0.0, 100.0, true),
jobs,
@@ -47,4 +47,4 @@ public void testCompareTo() {
assertThat(jobInfo1.compareTo(jobInfo3), lessThan(0));
assertThat(jobInfo2.compareTo(jobInfo3), lessThan(0));
}
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java
index 368758654cb9b..647835bf9311e 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java
@@ -87,7 +87,8 @@ public static MonitoringBulkDoc randomMonitoringBulkDoc(final Random random,
final MonitoredSystem system,
final String type) throws IOException {
final String id = random.nextBoolean() ? RandomStrings.randomAsciiLettersOfLength(random, 5) : null;
- final long timestamp = RandomNumbers.randomLongBetween(random, 0L, Long.MAX_VALUE);
+ // ending date is the last second of 9999, should be sufficient
+ final long timestamp = RandomNumbers.randomLongBetween(random, 0L, 253402300799000L);
final long interval = RandomNumbers.randomLongBetween(random, 0L, Long.MAX_VALUE);
return new MonitoringBulkDoc(system, type, id, timestamp, interval, source, xContentType);
}
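The new upper bound corresponds to the last second of the year 9999 in UTC, the largest instant that still renders with a four-digit year; it can be reproduced with java.time (illustrative snippet, not part of the patch):

    import java.time.LocalDateTime;
    import java.time.ZoneOffset;

    public class MaxTimestampBound {
        public static void main(String[] args) {
            // Epoch milliseconds of 9999-12-31T23:59:59Z.
            long maxMillis = LocalDateTime.of(9999, 12, 31, 23, 59, 59)
                    .toInstant(ZoneOffset.UTC)
                    .toEpochMilli();
            System.out.println(maxMillis); // prints 253402300799000
        }
    }
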
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java
index c23ef3c8ee51c..6caefe148b28a 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java
@@ -61,7 +61,7 @@
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE,
numDataNodes = 1, numClientNodes = 0, transportClientRatio = 0.0, supportsDedicatedMasters = false)
public class LocalExporterIntegTests extends LocalExporterIntegTestCase {
- private final String indexTimeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM", null);
+ private final String indexTimeFormat = randomFrom("yy", "yyyy", "yyyy.MM", "yyyy-MM", "MM.yyyy", "MM", null);
private void stopMonitoring() {
// Now disabling the monitoring service, so that no more collection are started
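The pattern change matters because Joda-Time's 'Y' means year-of-era while java.time's 'Y' means week-based year, which diverges from the calendar year around the turn of the year; lowercase 'y' preserves the old meaning. An illustrative snippet (not part of the patch) showing the divergence:

    import java.time.LocalDate;
    import java.time.format.DateTimeFormatter;
    import java.util.Locale;

    public class YearPatternPitfall {
        public static void main(String[] args) {
            LocalDate lastDayOf2018 = LocalDate.of(2018, 12, 31);
            // 'y' is the year-of-era: prints 2018.
            System.out.println(DateTimeFormatter.ofPattern("yyyy", Locale.ROOT).format(lastDayOf2018));
            // 'Y' is the week-based year: 2018-12-31 already belongs to week 1 of 2019, so this prints 2019.
            System.out.println(DateTimeFormatter.ofPattern("YYYY", Locale.ROOT).format(lastDayOf2018));
        }
    }
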
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java
index 232034177e87b..59141d2a83aeb 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java
@@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.rollup;
-import org.elasticsearch.common.rounding.DateTimeUnit;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
@@ -16,7 +16,6 @@
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps;
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
-import org.joda.time.DateTimeZone;
import java.util.ArrayList;
import java.util.Comparator;
@@ -98,7 +97,7 @@ private static void checkDateHisto(DateHistogramAggregationBuilder source, List<RollupJobCaps> jobCaps, Set<RollupJobCaps> bestCaps) {
DateHistogramInterval interval = new DateHistogramInterval((String)agg.get(RollupField.INTERVAL));
String thisTimezone = (String)agg.get(DateHistogramGroupConfig.TIME_ZONE);
- String sourceTimeZone = source.timeZone() == null ? DateTimeZone.UTC.toString() : source.timeZone().toString();
+ String sourceTimeZone = source.timeZone() == null ? "UTC" : source.timeZone().toString();
// Ensure we are working on the same timezone
if (thisTimezone.equalsIgnoreCase(sourceTimeZone) == false) {
@@ -152,10 +151,10 @@ static boolean validateCalendarInterval(DateHistogramInterval requestInterval,
// The request must be gte the config. The CALENDAR_ORDERING map values are integers representing
// relative orders between the calendar units
- DateTimeUnit requestUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(requestInterval.toString());
- long requestOrder = requestUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
- DateTimeUnit configUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(configInterval.toString());
- long configOrder = configUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
+ Rounding.DateTimeUnit requestUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(requestInterval.toString());
+ long requestOrder = requestUnit.getField().getBaseUnit().getDuration().toMillis();
+ Rounding.DateTimeUnit configUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(configInterval.toString());
+ long configOrder = configUnit.getField().getBaseUnit().getDuration().toMillis();
// All calendar units are multiples naturally, so we just care about gte
return requestOrder >= configOrder;
@@ -387,8 +386,8 @@ private static Comparator<RollupJobCaps> getComparator() {
static long getMillisFixedOrCalendar(String value) {
DateHistogramInterval interval = new DateHistogramInterval(value);
if (isCalendarInterval(interval)) {
- DateTimeUnit intervalUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString());
- return intervalUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
+ Rounding.DateTimeUnit intervalUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString());
+ return intervalUnit.getField().getBaseUnit().getDuration().toMillis();
} else {
return TimeValue.parseTimeValue(value, "date_histo.comparator.interval").getMillis();
}
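The replacement ordering logic leans on standard java.time metadata: each ChronoField has a base TemporalUnit whose (estimated) Duration grows with the coarseness of the calendar unit, so comparing those durations orders the units. A minimal sketch using plain ChronoField, which appears to be what Rounding.DateTimeUnit#getField exposes here (an assumption based on the calls in this hunk):

    import java.time.temporal.ChronoField;

    public class CalendarUnitOrdering {
        public static void main(String[] args) {
            // Base-unit duration in millis for a few calendar units, smallest to largest.
            long hour = ChronoField.HOUR_OF_DAY.getBaseUnit().getDuration().toMillis();    // 3,600,000
            long day = ChronoField.DAY_OF_MONTH.getBaseUnit().getDuration().toMillis();    // 86,400,000
            long month = ChronoField.MONTH_OF_YEAR.getBaseUnit().getDuration().toMillis(); // ~2,629,746,000 (estimated)
            // A request interval is acceptable when its unit is at least as coarse as the configured one.
            System.out.println(day >= hour);   // true
            System.out.println(hour >= month); // false
        }
    }
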
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
index ee29e56a33169..1d5f9093a29df 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
@@ -28,9 +28,9 @@
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer;
import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.core.indexing.IterationResult;
-import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer;
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
@@ -42,6 +42,7 @@
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
import org.joda.time.DateTimeZone;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -214,7 +215,7 @@ public static List<CompositeValuesSourceBuilder<?>> createValueSourceBuilders(final DateHistogramGroupConfig dateHistogram) {
final DateHistogramValuesSourceBuilder dateHistogramBuilder = new DateHistogramValuesSourceBuilder(dateHistogramName);
dateHistogramBuilder.dateHistogramInterval(dateHistogram.getInterval());
dateHistogramBuilder.field(dateHistogramField);
- dateHistogramBuilder.timeZone(toDateTimeZone(dateHistogram.getTimeZone()));
+ dateHistogramBuilder.timeZone(ZoneId.of(dateHistogram.getTimeZone()));
return Collections.singletonList(dateHistogramBuilder);
}
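Since the rollup group config stores its time zone as a plain string, the composite date-histogram source can now be configured with a direct ZoneId.of call instead of a Joda round trip. A quick sketch of the kinds of input ZoneId.of accepts (hypothetical values, not from the patch):

    import java.time.ZoneId;

    public class TimeZoneConversion {
        public static void main(String[] args) {
            // Region IDs, fixed offsets and "UTC" are all accepted by ZoneId.of.
            System.out.println(ZoneId.of("UTC"));           // UTC
            System.out.println(ZoneId.of("Europe/Paris"));  // Europe/Paris
            System.out.println(ZoneId.of("+01:00"));        // +01:00
        }
    }
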
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java
index 95161e0d149dc..d05a78e121296 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java
@@ -25,6 +25,7 @@
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
@@ -122,14 +123,14 @@ public void testIncompatibleFixedCalendarInterval() {
}
public void testBadTimeZone() {
- final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "EST"));
+ final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "CET"));
final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null);
RollupJobCaps cap = new RollupJobCaps(job);
Set<RollupJobCaps> caps = singletonSet(cap);
DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo")
.dateHistogramInterval(new DateHistogramInterval("1h"))
- .timeZone(DateTimeZone.UTC);
+ .timeZone(ZoneOffset.UTC);
RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps));
assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " +
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
index 3dc91ede1bd2c..0032b5a88a563 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java
@@ -147,7 +147,7 @@ public void testRangeWrongTZ() {
Set<RollupJobCaps> caps = new HashSet<>();
caps.add(cap);
Exception e = expectThrows(IllegalArgumentException.class,
- () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("EST"), caps));
+ () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("CET"), caps));
assertThat(e.getMessage(), equalTo("Field [foo] in [range] query was found in rollup indices, but requested timezone is not " +
"compatible. Options include: [UTC]"));
}
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
index bd8a0b19f8250..9f8796f4c9589 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
@@ -15,6 +15,7 @@
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
import org.joda.time.DateTimeZone;
+import java.time.zone.ZoneRulesException;
import java.util.HashMap;
import java.util.Map;
@@ -84,9 +85,9 @@ public void testDefaultTimeZone() {
}
public void testUnknownTimeZone() {
- Exception e = expectThrows(IllegalArgumentException.class,
+ Exception e = expectThrows(ZoneRulesException.class,
() -> new DateHistogramGroupConfig("foo", DateHistogramInterval.HOUR, null, "FOO"));
- assertThat(e.getMessage(), equalTo("The datetime zone id 'FOO' is not recognised"));
+ assertThat(e.getMessage(), equalTo("Unknown time-zone ID: FOO"));
}
public void testEmptyHistoField() {
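The expected exception changes because java.time signals unknown zone IDs with ZoneRulesException (a DateTimeException subclass) and a different message than Joda-Time's IllegalArgumentException; it also rejects bare three-letter abbreviations such as "EST", which is why the tests above switched to "CET". A standalone sketch of that behaviour (not part of the patch):

    import java.time.ZoneId;
    import java.time.zone.ZoneRulesException;

    public class UnknownZoneIds {
        public static void main(String[] args) {
            try {
                ZoneId.of("FOO");
            } catch (ZoneRulesException e) {
                System.out.println(e.getMessage()); // Unknown time-zone ID: FOO
            }
            // Three-letter abbreviations such as "EST" are also rejected unless the
            // ZoneId.SHORT_IDS alias map is passed explicitly.
            System.out.println(ZoneId.of("EST", ZoneId.SHORT_IDS)); // -05:00
        }
    }
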
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
index b87b1f3761fdb..cdabb36d42760 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
@@ -29,7 +29,7 @@
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchResponseSections;
import org.elasticsearch.action.search.ShardSearchFailure;
-import org.elasticsearch.common.rounding.Rounding;
+import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexSettings;
@@ -58,12 +58,14 @@
import org.junit.Before;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
@@ -144,22 +146,22 @@ public void testDateHistoAndMetrics() throws Exception {
final List