From 2ba9e361aba7df6e9c0b5fa8fae3d219d50ddf09 Mon Sep 17 00:00:00 2001
From: Zachary Tong
Date: Tue, 22 Jan 2019 12:38:55 -0500
Subject: [PATCH 01/24] Add helper classes to determine if aggs have a value
 (#36020)

This adds a set of helper classes to determine if an agg "has a value".
This is needed because InternalAggs represent "empty" in different
manners according to convention. Some use `NaN`, others `+/- Inf`,
`0.0`, etc. A user can pass the Internal agg type to one of these helper
methods and it will report whether the agg contains a value, which
allows the user to differentiate "empty" from a real `NaN`.

These helpers are best-effort in some cases. For example, several
pipeline aggs share a single return class but use different conventions
to mark "empty", so the helper uses the loosest definition that applies
to all the aggs that use the class.

Sums in particular are unreliable. The InternalSum simply returns 0.0
if the agg is empty (which is correct: no values == a sum of zero), but
this also means the helper cannot differentiate between "empty" and a
sum that nets out to zero, such as `+1 + -1`.
---
 .../MatrixAggregationInspectionHelper.java    |  29 +++
 .../stats/MatrixStatsAggregatorTests.java     |   5 +-
 .../JoinAggregationInspectionHelper.java      |  34 +++
 .../ChildrenToParentAggregatorTests.java      |   3 +
 .../ParentToChildrenAggregatorTests.java      |   1 +
 .../bucket/composite/InternalComposite.java   |   2 +-
 .../bucket/filter/InternalFilter.java         |   2 +-
 .../bucket/geogrid/GeoGridBucket.java         |   2 +-
 .../bucket/sampler/InternalSampler.java       |   2 +-
 .../AbstractInternalHDRPercentiles.java       |   4 +
 .../AbstractInternalTDigestPercentiles.java   |   4 +
 .../metrics/InternalCardinality.java          |   4 +
 .../InternalMedianAbsoluteDeviation.java      |   2 +-
 .../metrics/InternalScriptedMetric.java       |   4 +
 .../metrics/MetricInspectionHelper.java       |  78 ++++++
 .../BucketScriptPipelineAggregator.java       |   5 +-
 .../CumulativeSumPipelineAggregator.java      |   3 +-
 .../pipeline/MovAvgPipelineAggregator.java    |  14 +-
 .../support/AggregationInspectionHelper.java  | 242 ++++++++++++++++++
 .../bucket/filter/FilterAggregatorTests.java  |   7 +
 .../bucket/filter/FiltersAggregatorTests.java |  11 +
 .../geogrid/GeoHashGridAggregatorTests.java   |   4 +
 .../AutoDateHistogramAggregatorTests.java     |  53 +++-
 .../DateHistogramAggregatorTests.java         |  45 +++-
 .../histogram/HistogramAggregatorTests.java   |  22 +-
 .../missing/MissingAggregatorTests.java       |  13 +-
 .../bucket/nested/NestedAggregatorTests.java  |  31 ++-
 .../sampler/SamplerAggregatorTests.java       |   4 +-
 .../SignificantTextAggregatorTests.java       |  14 +-
 .../bucket/terms/TermsAggregatorTests.java    |  12 +
 .../metrics/AvgAggregatorTests.java           |  10 +-
 .../metrics/CardinalityAggregatorTests.java   |   7 +
 .../metrics/ExtendedStatsAggregatorTests.java |   4 +
 .../metrics/GeoBoundsAggregatorTests.java     |   3 +
 .../metrics/GeoCentroidAggregatorTests.java   |   4 +
 .../HDRPercentileRanksAggregatorTests.java    |   3 +
 .../HDRPercentilesAggregatorTests.java        |   7 +
 .../metrics/MaxAggregatorTests.java           |   7 +
 ...edianAbsoluteDeviationAggregatorTests.java |  31 ++-
 .../metrics/MinAggregatorTests.java           |   5 +
 .../metrics/StatsAggregatorTests.java         |   4 +
 .../metrics/SumAggregatorTests.java           |  37 ++-
 ...TDigestPercentileRanksAggregatorTests.java |   3 +
 .../TDigestPercentilesAggregatorTests.java    |   7 +
 .../metrics/TopHitsAggregatorTests.java       |   7 +
 .../metrics/ValueCountAggregatorTests.java    |  40 ++-
 .../metrics/WeightedAvgAggregatorTests.java   |  13 +
 .../CumulativeSumAggregatorTests.java         |  25 ++
 .../extractor/TestMultiValueAggregation.java  |   2 +-
 49 files changed, 777 insertions(+), 98 deletions(-)
 create mode 100644
modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixAggregationInspectionHelper.java create mode 100644 modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/JoinAggregationInspectionHelper.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInspectionHelper.java diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixAggregationInspectionHelper.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixAggregationInspectionHelper.java new file mode 100644 index 0000000000000..c62c91477cdef --- /dev/null +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixAggregationInspectionHelper.java @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.search.aggregations.matrix.stats; + +/** + * Counterpart to {@link org.elasticsearch.search.aggregations.support.AggregationInspectionHelper}, providing + * helpers for some aggs in the MatrixStats package + */ +public class MatrixAggregationInspectionHelper { + public static boolean hasValue(InternalMatrixStats agg) { + return agg.getResults() != null; + } +} diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java index aa778e6f704f9..0512f3d5db3b6 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java @@ -53,10 +53,12 @@ public void testNoData() throws Exception { .fields(Collections.singletonList("field")); InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft); assertNull(stats.getStats()); + assertFalse(MatrixAggregationInspectionHelper.hasValue(stats)); } } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37587") public void testTwoFields() throws Exception { String fieldA = "a"; MappedFieldType ftA = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); @@ -87,8 +89,9 @@ public void testTwoFields() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg") .fields(Arrays.asList(fieldA, fieldB)); - InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB); + InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB); multiPassStats.assertNearlyEqual(new MatrixStatsResults(stats.getStats())); + assertTrue(MatrixAggregationInspectionHelper.hasValue(stats)); } } } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/JoinAggregationInspectionHelper.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/JoinAggregationInspectionHelper.java new file mode 100644 index 0000000000000..08d137f4fb90d --- /dev/null +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/JoinAggregationInspectionHelper.java @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.join.aggregations; + +/** + * Counterpart to {@link org.elasticsearch.search.aggregations.support.AggregationInspectionHelper}, providing + * helpers for some aggs in the Join package + */ +public class JoinAggregationInspectionHelper { + + public static boolean hasValue(InternalParent agg) { + return agg.getDocCount() > 0; + } + + public static boolean hasValue(InternalChildren agg) { + return agg.getDocCount() > 0; + } +} diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index 685c872fa72d4..c8c7200df6114 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -90,6 +90,7 @@ public void testNoDocs() throws IOException { assertEquals(0, childrenToParent.getDocCount()); assertNotNull("Aggregations: " + childrenToParent.getAggregations().asMap(), parentAggregation); assertEquals(Double.POSITIVE_INFINITY, ((InternalMin) parentAggregation).getValue(), Double.MIN_VALUE); + assertFalse(JoinAggregationInspectionHelper.hasValue(childrenToParent)); }); indexReader.close(); directory.close(); @@ -119,6 +120,7 @@ public void testParentChild() throws IOException { parent.getAggregations().asMap(), expectedTotalParents, parent.getDocCount()); assertEquals(expectedMinValue, ((InternalMin) parent.getAggregations().get("in_parent")).getValue(), Double.MIN_VALUE); + assertTrue(JoinAggregationInspectionHelper.hasValue(parent)); }); // verify for each children @@ -170,6 +172,7 @@ public void testParentChildTerms() throws IOException { // verify a terms-aggregation inside the parent-aggregation testCaseTerms(new MatchAllDocsQuery(), indexSearcher, parent -> { assertNotNull(parent); + assertTrue(JoinAggregationInspectionHelper.hasValue(parent)); LongTerms valueTerms = parent.getAggregations().get("value_terms"); assertNotNull(valueTerms); diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index 452fe1b490b02..9023f3f0485ba 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -105,6 +105,7 @@ public void testParentChild() throws IOException { expectedMinValue = Math.min(expectedMinValue, expectedValues.v2()); } assertEquals(expectedTotalChildren, child.getDocCount()); + assertTrue(JoinAggregationInspectionHelper.hasValue(child)); assertEquals(expectedMinValue, ((InternalMin) child.getAggregations().get("in_child")).getValue(), Double.MIN_VALUE); }); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 26fcfed9a262f..822fdd7ba31a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -226,7 +226,7 @@ InternalBucket next() { } } - static class 
InternalBucket extends InternalMultiBucketAggregation.InternalBucket + public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements CompositeAggregation.Bucket, KeyComparable { private final CompositeKey key; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilter.java index 5ee89ee05eef0..f5127c31bce65 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilter.java @@ -50,4 +50,4 @@ public String getWriteableName() { protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) { return new InternalFilter(name, docCount, subAggregations, pipelineAggregators(), getMetaData()); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridBucket.java index 8246d629bd527..850bc6e7bafcb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridBucket.java @@ -34,7 +34,7 @@ import java.util.List; import java.util.Objects; -class GeoGridBucket extends InternalMultiBucketAggregation.InternalBucket implements GeoHashGrid.Bucket, Comparable { +public class GeoGridBucket extends InternalMultiBucketAggregation.InternalBucket implements GeoHashGrid.Bucket, Comparable { protected long geohashAsLong; protected long docCount; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSampler.java index 1a04133e82bfc..da72fec8511be 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSampler.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/InternalSampler.java @@ -59,4 +59,4 @@ protected InternalSingleBucketAggregation newAggregation(String name, long docCo InternalAggregations subAggregations) { return new InternalSampler(name, docCount, subAggregations, pipelineAggregators(), metaData); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java index 7050254f2793f..b3bbe6b911c48 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java @@ -98,6 +98,10 @@ public long getEstimatedMemoryFootprint() { return state.getEstimatedFootprintInBytes(); } + DoubleHistogram getState() { + return state; + } + @Override public AbstractInternalHDRPercentiles doReduce(List aggregations, ReduceContext reduceContext) { DoubleHistogram merged = null; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java index 
6e6ff3cf3a88b..cc63e5f7a4325 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java
@@ -82,6 +82,10 @@ public long getEstimatedMemoryFootprint() {
         return state.byteSize();
     }
 
+    TDigestState getState() {
+        return state;
+    }
+
     @Override
     public AbstractInternalTDigestPercentiles doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
         TDigestState merged = null;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
index b3fcb33a4fb84..144777379b5ad 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
@@ -79,6 +79,10 @@ public long getValue() {
         return counts == null ? 0 : counts.cardinality(0);
     }
 
+    HyperLogLogPlusPlus getCounts() {
+        return counts;
+    }
+
     @Override
     public InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
         InternalCardinality reduced = null;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java
index 01ab3a323e12d..ace9edb13f515 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java
@@ -121,7 +121,7 @@ public String getWriteableName() {
         return MedianAbsoluteDeviationAggregationBuilder.NAME;
     }
 
-    public TDigestState getValuesSketch() {
+    TDigestState getValuesSketch() {
         return valuesSketch;
     }
 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java
index ec2419e03ab3c..a0bb5e74fd5cb 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java
@@ -80,6 +80,10 @@ public Object aggregation() {
         return aggregation.get(0);
     }
 
+    List<Object> getAggregation() {
+        return aggregation;
+    }
+
     @Override
     public InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
         List<Object> aggregationObjects = new ArrayList<>();
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java
new file mode 100644
index 0000000000000..88812cda9899a
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.metrics;
+
+import org.elasticsearch.search.aggregations.pipeline.InternalDerivative;
+
+/**
+ * Counterpart to {@link org.elasticsearch.search.aggregations.support.AggregationInspectionHelper}, providing
+ * helpers for some aggs that have package-private getters. AggregationInspectionHelper delegates to these
+ * helpers when needed, and consumers should prefer to use AggregationInspectionHelper instead of these
+ * helpers.
+ */
+public class MetricInspectionHelper {
+
+    public static boolean hasValue(InternalAvg agg) {
+        return agg.getCount() > 0;
+    }
+
+    public static boolean hasValue(InternalCardinality agg) {
+        return agg.getCounts() != null;
+    }
+
+    public static boolean hasValue(InternalHDRPercentileRanks agg) {
+        return agg.getState().getTotalCount() > 0;
+    }
+
+    public static boolean hasValue(InternalHDRPercentiles agg) {
+        return agg.getState().getTotalCount() > 0;
+    }
+
+    public static boolean hasValue(InternalMedianAbsoluteDeviation agg) {
+        return agg.getValuesSketch().size() > 0;
+    }
+
+    public static boolean hasValue(InternalScriptedMetric agg) {
+        // TODO better way to know if the scripted metric received documents?
+        // Could check for null too, but a script might return null on purpose...
+        return agg.getAggregation().size() > 0;
+    }
+
+    public static boolean hasValue(InternalTDigestPercentileRanks agg) {
+        return agg.getState().size() > 0;
+    }
+
+    public static boolean hasValue(InternalTDigestPercentiles agg) {
+        return agg.getState().size() > 0;
+    }
+
+    public static boolean hasValue(InternalTopHits agg) {
+        return (agg.getHits().getTotalHits().value == 0
+            && Double.isNaN(agg.getHits().getMaxScore())
+            && Double.isNaN(agg.getTopDocs().maxScore)) == false;
+    }
+
+    public static boolean hasValue(InternalWeightedAvg agg) {
+        return (agg.getSum() == 0.0 && agg.getWeight() == 0L) == false;
+    }
+
+    public static boolean hasValue(InternalDerivative agg) {
+        return true;
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java
index d19425e3e0359..996c9b2f0e12f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java
@@ -114,7 +114,10 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext
             } else {
                 final List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map(
                     (p) -> (InternalAggregation) p).collect(Collectors.toList());
-                aggs.add(new InternalSimpleValue(name(), returned.doubleValue(), formatter, new ArrayList<>(), metaData()));
+
+                InternalSimpleValue simpleValue = new InternalSimpleValue(name(), returned.doubleValue(),
+                    formatter, new ArrayList<>(), metaData());
+                aggs.add(simpleValue);
                 InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs),
bucket); newBuckets.add(newBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java index a70144b421a48..54e772b0d8e82 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java @@ -83,8 +83,7 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext sum += thisBucketValue; } - List aggs = StreamSupport - .stream(bucket.getAggregations().spliterator(), false) + List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) .map((p) -> (InternalAggregation) p) .collect(Collectors.toList()); aggs.add(new InternalSimpleValue(name(), sum, formatter, new ArrayList<>(), metaData())); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregator.java index 10d1cdc5a71cd..1f36c989c163d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregator.java @@ -117,7 +117,7 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext // since we only change newBucket if we can add to it Bucket newBucket = bucket; - if (!(thisBucketValue == null || thisBucketValue.equals(Double.NaN))) { + if ((thisBucketValue == null || thisBucketValue.equals(Double.NaN)) == false) { // Some models (e.g. 
HoltWinters) have certain preconditions that must be met if (model.hasValue(values.size())) { @@ -126,7 +126,7 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) .map((p) -> (InternalAggregation) p) .collect(Collectors.toList()); - aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList(), metaData())); + aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList<>(), metaData())); newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs)); } @@ -153,10 +153,10 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext Bucket bucket = newBuckets.get(lastValidPosition + i + 1); // Get the existing aggs in the bucket so we don't clobber data - aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> { - return (InternalAggregation) p; - }).collect(Collectors.toList()); - aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList(), metaData())); + aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) + .map((p) -> (InternalAggregation) p) + .collect(Collectors.toList()); + aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList<>(), metaData())); Bucket newBucket = factory.createBucket(newKey, bucket.getDocCount(), new InternalAggregations(aggs)); @@ -166,7 +166,7 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext } else { // Not seen before, create fresh aggs = new ArrayList<>(); - aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList(), metaData())); + aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList<>(), metaData())); Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations(aggs)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInspectionHelper.java new file mode 100644 index 0000000000000..c41fa29bde3db --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationInspectionHelper.java @@ -0,0 +1,242 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.search.aggregations.support; + +import org.elasticsearch.search.aggregations.bucket.adjacency.InternalAdjacencyMatrix; +import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; +import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; +import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters; +import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid; +import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; +import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing; +import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; +import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNested; +import org.elasticsearch.search.aggregations.bucket.range.InternalRange; +import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler; +import org.elasticsearch.search.aggregations.bucket.sampler.UnmappedSampler; +import org.elasticsearch.search.aggregations.bucket.significant.InternalSignificantTerms; +import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms; +import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms; +import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; +import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.InternalCardinality; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; +import org.elasticsearch.search.aggregations.metrics.InternalGeoBounds; +import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.InternalMedianAbsoluteDeviation; +import org.elasticsearch.search.aggregations.metrics.InternalMin; +import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.InternalStats; +import org.elasticsearch.search.aggregations.metrics.InternalSum; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.InternalTopHits; +import org.elasticsearch.search.aggregations.metrics.InternalValueCount; +import org.elasticsearch.search.aggregations.metrics.InternalWeightedAvg; +import org.elasticsearch.search.aggregations.metrics.MetricInspectionHelper; +import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucket; +import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; + +import java.util.stream.StreamSupport; + +/** + * Provides a set of static helpers to determine if a particular type of InternalAggregation "has a value" + * or not. 
This can be difficult to determine from an external perspective because each agg uses
+ * different internal bookkeeping to determine if it is empty or not (NaN, +/-Inf, 0.0, etc).
+ *
+ * This set of helpers aims to ease that task by codifying what "empty" is for each agg.
+ *
+ * It is not entirely accurate for all aggs, since some do not expose or track the needed state
+ * (e.g. sum doesn't record count, so it's not clear if the sum is 0.0 because it is empty
+ * or because of summing to zero). Pipeline aggs in particular are not well supported
+ * by these helpers since most share InternalSimpleValue and it's not clear which pipeline
+ * generated the value.
+ */
+public class AggregationInspectionHelper {
+    public static <A extends InternalTerms<A, B>, B extends InternalTerms.Bucket<B>> boolean hasValue(InternalTerms<A, B> agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(UnmappedTerms agg) {
+        return false;
+    }
+
+    public static boolean hasValue(UnmappedSignificantTerms agg) {
+        return false;
+    }
+
+    public static boolean hasValue(UnmappedSampler agg) {
+        return false;
+    }
+
+    public static boolean hasValue(InternalAdjacencyMatrix agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalFilters agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalFilter agg) {
+        return agg.getDocCount() > 0;
+    }
+
+    public static boolean hasValue(InternalGeoHashGrid agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalGlobal agg) {
+        return agg.getDocCount() > 0;
+    }
+
+    public static boolean hasValue(InternalHistogram agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalDateHistogram agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalAutoDateHistogram agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalComposite agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalMissing agg) {
+        return agg.getDocCount() > 0;
+    }
+
+    public static boolean hasValue(InternalNested agg) {
+        return agg.getDocCount() > 0;
+    }
+
+    public static boolean hasValue(InternalReverseNested agg) {
+        return agg.getDocCount() > 0;
+    }
+
+    public static <B extends InternalRange.Bucket, R extends InternalRange<B, R>> boolean hasValue(InternalRange<B, R> agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalSampler agg) {
+        return agg.getDocCount() > 0;
+    }
+
+    public static <A extends InternalSignificantTerms<A, B>,
+            B extends InternalSignificantTerms.Bucket<B>> boolean hasValue(InternalSignificantTerms<A, B> agg) {
+        return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
+    }
+
+    public static boolean hasValue(InternalAvg agg) {
+        return MetricInspectionHelper.hasValue(agg);
+    }
+
+    public static boolean hasValue(InternalSum agg) {
+        // TODO this could be incorrect... e.g.
+1 + -1 + return agg.getValue() != 0.0; + } + + public static boolean hasValue(InternalCardinality agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalExtendedStats agg) { + return agg.getCount() > 0; + } + + public static boolean hasValue(InternalGeoBounds agg) { + return (agg.topLeft() == null && agg.bottomRight() == null) == false; + } + + public static boolean hasValue(InternalGeoCentroid agg) { + return agg.centroid() != null && agg.count() > 0; + } + + public static boolean hasValue(InternalHDRPercentileRanks agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalHDRPercentiles agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalMax agg) { + return agg.getValue() != Double.NEGATIVE_INFINITY; + } + + public static boolean hasValue(InternalMedianAbsoluteDeviation agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalMin agg) { + return agg.getValue() != Double.POSITIVE_INFINITY; + } + + public static boolean hasValue(InternalScriptedMetric agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalStats agg) { + return agg.getCount() > 0; + } + + public static boolean hasValue(InternalTDigestPercentileRanks agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalTDigestPercentiles agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalTopHits agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalValueCount agg) { + return agg.getValue() > 0; + } + + public static boolean hasValue(InternalWeightedAvg agg) { + return MetricInspectionHelper.hasValue(agg); + } + + public static boolean hasValue(InternalSimpleValue agg) { + // This is a coarse approximation, since some aggs use positive/negative infinity or NaN + return (Double.isInfinite(agg.getValue()) || Double.isNaN(agg.getValue())) == false; + } + + public static boolean hasValue(InternalBucketMetricValue agg) { + return Double.isInfinite(agg.value()) == false; + } + + public static boolean hasValue(InternalPercentilesBucket agg) { + return StreamSupport.stream(agg.spliterator(), false).allMatch(p -> Double.isNaN(p.getValue())) == false; + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java index d92fb7ff62e43..f5b5d187e4187 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.junit.Before; public class FilterAggregatorTests extends AggregatorTestCase { @@ -57,6 +58,7 @@ public void testEmpty() throws Exception { InternalFilter response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); assertEquals(response.getDocCount(), 0); + assertFalse(AggregationInspectionHelper.hasValue(response)); indexReader.close(); directory.close(); } @@ -96,6 +98,11 @@ 
public void testRandom() throws Exception {
                 response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
             }
             assertEquals(response.getDocCount(), (long) expectedBucketCount[value]);
+            if (expectedBucketCount[value] > 0) {
+                assertTrue(AggregationInspectionHelper.hasValue(response));
+            } else {
+                assertFalse(AggregationInspectionHelper.hasValue(response));
+            }
         }
         indexReader.close();
         directory.close();
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java
index 05ed091978270..ff5cb84482db0 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java
@@ -32,6 +32,7 @@
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
 import org.junit.Before;
 
 import java.util.HashSet;
@@ -68,6 +69,7 @@ public void testEmpty() throws Exception {
         for (InternalFilters.InternalBucket filter : response.getBuckets()) {
             assertEquals(filter.getDocCount(), 0);
         }
+        assertFalse(AggregationInspectionHelper.hasValue(response));
         indexReader.close();
         directory.close();
     }
@@ -129,6 +131,7 @@ public void testKeyedFilter() throws Exception {
             assertEquals(filters.getBucketByKey("bar").getDocCount(), 1);
             assertEquals(filters.getBucketByKey("same").getDocCount(), 1);
             assertEquals(filters.getBucketByKey("other").getDocCount(), 2);
+            assertTrue(AggregationInspectionHelper.hasValue(filters));
         }
 
         indexReader.close();
@@ -185,14 +188,22 @@ public void testRandom() throws Exception {
             List<InternalFilters.InternalBucket> buckets = response.getBuckets();
             assertEquals(buckets.size(), filters.length+1);
 
+            int sum = 0;
             for (InternalFilters.InternalBucket bucket : buckets) {
                 if ("other".equals(bucket.getKey())) {
                     assertEquals(bucket.getDocCount(), expectedOtherCount);
                 } else {
                     int index = Integer.parseInt(bucket.getKey());
                     assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]);
+                    sum += expectedBucketCount[filterTerms[index]];
                 }
             }
+            if (sum > 0) {
+                assertTrue(AggregationInspectionHelper.hasValue(response));
+            } else {
+                assertFalse(AggregationInspectionHelper.hasValue(response));
+            }
+
         }
         indexReader.close();
         directory.close();
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java
index 45b6b64cddc35..2d270f8298ff1 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java
@@ -31,6 +31,7 @@
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -53,6 +54,7 @@ public void testNoDocs() throws IOException {
             // Intentionally not writing any docs
         }, geoHashGrid -> {
             assertEquals(0,
geoHashGrid.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(geoHashGrid)); }); } @@ -61,6 +63,7 @@ public void testFieldMissing() throws IOException { iw.addDocument(Collections.singleton(new LatLonDocValuesField(FIELD_NAME, 10D, 10D))); }, geoHashGrid -> { assertEquals(0, geoHashGrid.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(geoHashGrid)); }); } @@ -94,6 +97,7 @@ public void testWithSeveralDocs() throws IOException { for (GeoHashGrid.Bucket bucket : geoHashGrid.getBuckets()) { assertEquals((long) expectedCountPerGeoHash.get(bucket.getKeyAsString()), bucket.getDocCount()); } + assertTrue(AggregationInspectionHelper.hasValue(geoHashGrid)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java index 2d5109405dc1c..6b4d1482adb5e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -38,7 +38,8 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; -import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.InternalStats; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.hamcrest.Matchers; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -75,18 +76,27 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase { public void testMatchNoDocs() throws IOException { testBothCases(new MatchNoDocsQuery(), DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), - histogram -> assertEquals(0, histogram.getBuckets().size()) + histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + } ); } public void testMatchAllDocs() throws IOException { testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), - histogram -> assertEquals(10, histogram.getBuckets().size()) + histogram -> { + assertEquals(10, histogram.getBuckets().size()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); + } ); testSearchAndReduceCase(DEFAULT_QUERY, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), - histogram -> assertEquals(8, histogram.getBuckets().size()) + histogram -> { + assertEquals(8, histogram.getBuckets().size()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); + } ); } @@ -95,16 +105,18 @@ public void testSubAggregations() throws IOException { aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD) .subAggregation(AggregationBuilders.stats("stats").field(DATE_FIELD)), histogram -> { + assertTrue(AggregationInspectionHelper.hasValue(histogram)); final List buckets = histogram.getBuckets(); assertEquals(8, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2010-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); - Stats stats = bucket.getAggregations().get("stats"); + InternalStats stats = bucket.getAggregations().get("stats"); 
assertEquals("2010-03-12T01:07:45.000Z", stats.getMinAsString()); assertEquals("2010-04-27T03:43:34.000Z", stats.getMaxAsString()); assertEquals(2L, stats.getCount()); + assertTrue(AggregationInspectionHelper.hasValue(stats)); bucket = buckets.get(1); assertEquals("2011-01-01T00:00:00.000Z", bucket.getKeyAsString()); @@ -113,6 +125,7 @@ public void testSubAggregations() throws IOException { assertTrue(Double.isInfinite(stats.getMin())); assertTrue(Double.isInfinite(stats.getMax())); assertEquals(0L, stats.getCount()); + assertFalse(AggregationInspectionHelper.hasValue(stats)); bucket = buckets.get(2); assertEquals("2012-01-01T00:00:00.000Z", bucket.getKeyAsString()); @@ -121,6 +134,7 @@ public void testSubAggregations() throws IOException { assertEquals("2012-05-18T04:11:00.000Z", stats.getMinAsString()); assertEquals("2012-05-18T04:11:00.000Z", stats.getMaxAsString()); assertEquals(1L, stats.getCount()); + assertTrue(AggregationInspectionHelper.hasValue(stats)); bucket = buckets.get(3); assertEquals("2013-01-01T00:00:00.000Z", bucket.getKeyAsString()); @@ -129,6 +143,7 @@ public void testSubAggregations() throws IOException { assertEquals("2013-05-29T05:11:31.000Z", stats.getMinAsString()); assertEquals("2013-10-31T08:24:05.000Z", stats.getMaxAsString()); assertEquals(2L, stats.getCount()); + assertTrue(AggregationInspectionHelper.hasValue(stats)); bucket = buckets.get(4); assertEquals("2014-01-01T00:00:00.000Z", bucket.getKeyAsString()); @@ -137,6 +152,7 @@ public void testSubAggregations() throws IOException { assertTrue(Double.isInfinite(stats.getMin())); assertTrue(Double.isInfinite(stats.getMax())); assertEquals(0L, stats.getCount()); + assertFalse(AggregationInspectionHelper.hasValue(stats)); bucket = buckets.get(5); assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString()); @@ -145,6 +161,7 @@ public void testSubAggregations() throws IOException { assertEquals("2015-02-13T13:09:32.000Z", stats.getMinAsString()); assertEquals("2015-11-13T16:14:34.000Z", stats.getMaxAsString()); assertEquals(3L, stats.getCount()); + assertTrue(AggregationInspectionHelper.hasValue(stats)); bucket = buckets.get(6); assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString()); @@ -153,6 +170,7 @@ public void testSubAggregations() throws IOException { assertEquals("2016-03-04T17:09:50.000Z", stats.getMinAsString()); assertEquals("2016-03-04T17:09:50.000Z", stats.getMaxAsString()); assertEquals(1L, stats.getCount()); + assertTrue(AggregationInspectionHelper.hasValue(stats)); bucket = buckets.get(7); assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); @@ -161,6 +179,7 @@ public void testSubAggregations() throws IOException { assertEquals("2017-12-12T22:55:46.000Z", stats.getMinAsString()); assertEquals("2017-12-12T22:55:46.000Z", stats.getMaxAsString()); assertEquals(1L, stats.getCount()); + assertTrue(AggregationInspectionHelper.hasValue(stats)); }); } @@ -169,7 +188,10 @@ public void testNoDocs() throws IOException { final Consumer aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD); testSearchCase(DEFAULT_QUERY, dates, aggregation, - histogram -> assertEquals(0, histogram.getBuckets().size()) + histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + } ); testSearchAndReduceCase(DEFAULT_QUERY, dates, aggregation, Assert::assertNull @@ -179,7 +201,10 @@ public void testNoDocs() throws IOException { public void testAggregateWrongField() throws IOException { testBothCases(DEFAULT_QUERY, 
DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(10).field("wrong_field"), - histogram -> assertEquals(0, histogram.getBuckets().size()) + histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + } ); } @@ -197,6 +222,7 @@ public void testIntervalYear() throws IOException { assertEquals(DATES_WITH_TIME.get(5 + i), bucket.getKey()); assertEquals(1, bucket.getDocCount()); } + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } ); testSearchAndReduceCase(rangeQuery, DATES_WITH_TIME, @@ -211,6 +237,7 @@ public void testIntervalYear() throws IOException { assertEquals(expectedDocCount.size(), buckets.size()); buckets.forEach(bucket -> assertEquals(expectedDocCount.getOrDefault(bucket.getKey(), 0).longValue(), bucket.getDocCount())); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } ); } @@ -244,6 +271,7 @@ public void testIntervalMonth() throws IOException { assertEquals(expectedDocCount.size(), buckets.size()); buckets.forEach(bucket -> assertEquals(expectedDocCount.getOrDefault(bucket.getKey(), 0).longValue(), bucket.getDocCount())); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } ); } @@ -287,6 +315,7 @@ public void testIntervalDay() throws IOException { assertEquals(5, buckets.size()); buckets.forEach(bucket -> assertEquals(expectedDocCount.getOrDefault(bucket.getKey(), 0).longValue(), bucket.getDocCount())); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } ); } @@ -311,6 +340,7 @@ public void testIntervalDayWithTZ() throws IOException { assertEquals(expectedDocCount.size(), buckets.size()); buckets.forEach(bucket -> assertEquals(expectedDocCount.getOrDefault(bucket.getKeyAsString(), 0).longValue(), bucket.getDocCount())); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); }); testSearchAndReduceCase(DEFAULT_QUERY, datesForDayInterval, aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> { @@ -323,6 +353,7 @@ public void testIntervalDayWithTZ() throws IOException { assertEquals(5, buckets.size()); buckets.forEach(bucket -> assertEquals(expectedDocCount.getOrDefault(bucket.getKeyAsString(), 0).longValue(), bucket.getDocCount())); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); }); } @@ -683,19 +714,19 @@ public void testIntervalSecond() throws IOException { private void testSearchCase(final Query query, final List dataset, final Consumer configure, - final Consumer verify) throws IOException { + final Consumer verify) throws IOException { executeTestCase(false, query, dataset, configure, verify); } private void testSearchAndReduceCase(final Query query, final List dataset, final Consumer configure, - final Consumer verify) throws IOException { + final Consumer verify) throws IOException { executeTestCase(true, query, dataset, configure, verify); } private void testBothCases(final Query query, final List dataset, final Consumer configure, - final Consumer verify) throws IOException { + final Consumer verify) throws IOException { executeTestCase(false, query, dataset, configure, verify); executeTestCase(true, query, dataset, configure, verify); } @@ -716,7 +747,7 @@ protected IndexSettings createIndexSettings() { private void executeTestCase(final boolean reduced, final Query query, final List dataset, final Consumer configure, - final Consumer verify) throws IOException { + final Consumer verify) throws IOException { try (Directory directory = newDirectory()) { 
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { final Document document = new Document(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index 4e6004444a63e..c1b9396664a22 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.MultiBucketConsumerService.TooManyBucketsException; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.Arrays; @@ -61,7 +62,10 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase { public void testMatchNoDocs() throws IOException { testBothCases(new MatchNoDocsQuery(), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), - histogram -> assertEquals(0, histogram.getBuckets().size()) + histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + } ); } @@ -70,15 +74,24 @@ public void testMatchAllDocs() throws IOException { testSearchCase(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), - histogram -> assertEquals(6, histogram.getBuckets().size()) + histogram -> { + assertEquals(6, histogram.getBuckets().size()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); + } ); testSearchAndReduceCase(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), - histogram -> assertEquals(8, histogram.getBuckets().size()) + histogram -> { + assertEquals(8, histogram.getBuckets().size()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); + } ); testBothCases(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD).minDocCount(1L), - histogram -> assertEquals(6, histogram.getBuckets().size()) + histogram -> { + assertEquals(6, histogram.getBuckets().size()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); + } ); } @@ -89,7 +102,10 @@ public void testNoDocs() throws IOException { agg.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD); testSearchCase(query, dates, aggregation, - histogram -> assertEquals(0, histogram.getBuckets().size()) + histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + } ); testSearchAndReduceCase(query, dates, aggregation, histogram -> assertNull(histogram) @@ -99,7 +115,10 @@ public void testNoDocs() throws IOException { public void testAggregateWrongField() throws IOException { testBothCases(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field("wrong_field"), - histogram -> assertEquals(0, histogram.getBuckets().size()) + histogram -> { + assertEquals(0, histogram.getBuckets().size()); + assertFalse(AggregationInspectionHelper.hasValue(histogram)); + } ); } @@ -371,39 +390,39 @@ public void 
testMaxBucket() throws IOException { private void testSearchCase(Query query, List dataset, Consumer configure, - Consumer verify) throws IOException { + Consumer verify) throws IOException { testSearchCase(query, dataset, configure, verify, 10000); } private void testSearchCase(Query query, List dataset, Consumer configure, - Consumer verify, + Consumer verify, int maxBucket) throws IOException { executeTestCase(false, query, dataset, configure, verify, maxBucket); } private void testSearchAndReduceCase(Query query, List dataset, Consumer configure, - Consumer verify) throws IOException { + Consumer verify) throws IOException { testSearchAndReduceCase(query, dataset, configure, verify, 1000); } private void testSearchAndReduceCase(Query query, List dataset, Consumer configure, - Consumer verify, + Consumer verify, int maxBucket) throws IOException { executeTestCase(true, query, dataset, configure, verify, maxBucket); } private void testBothCases(Query query, List dataset, Consumer configure, - Consumer verify) throws IOException { + Consumer verify) throws IOException { testBothCases(query, dataset, configure, verify, 10000); } private void testBothCases(Query query, List dataset, Consumer configure, - Consumer verify, + Consumer verify, int maxBucket) throws IOException { testSearchCase(query, dataset, configure, verify, maxBucket); testSearchAndReduceCase(query, dataset, configure, verify, maxBucket); @@ -411,7 +430,7 @@ private void testBothCases(Query query, List dataset, private void executeTestCase(boolean reduced, Query query, List dataset, Consumer configure, - Consumer verify, + Consumer verify, int maxBucket) throws IOException { try (Directory directory = newDirectory()) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorTests.java index 81ffd1c78a3f7..bdb99a4971ac1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; public class HistogramAggregatorTests extends AggregatorTestCase { @@ -49,7 +50,7 @@ public void testLongs() throws Exception { fieldType.setName("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertEquals(4, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); @@ -59,6 +60,7 @@ public void testLongs() throws Exception { assertEquals(2, histogram.getBuckets().get(2).getDocCount()); assertEquals(50d, histogram.getBuckets().get(3).getKey()); assertEquals(1, histogram.getBuckets().get(3).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } } @@ -79,7 +81,7 @@ public void testDoubles() throws Exception { fieldType.setName("field"); try (IndexReader reader = w.getReader()) { IndexSearcher 
searcher = new IndexSearcher(reader); - Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertEquals(4, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); @@ -89,6 +91,7 @@ public void testDoubles() throws Exception { assertEquals(2, histogram.getBuckets().get(2).getDocCount()); assertEquals(50d, histogram.getBuckets().get(3).getKey()); assertEquals(1, histogram.getBuckets().get(3).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } } @@ -109,7 +112,7 @@ public void testIrrationalInterval() throws Exception { fieldType.setName("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertEquals(4, histogram.getBuckets().size()); assertEquals(-4 * Math.PI, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -119,6 +122,7 @@ public void testIrrationalInterval() throws Exception { assertEquals(2, histogram.getBuckets().get(2).getDocCount()); assertEquals(Math.PI, histogram.getBuckets().get(3).getKey()); assertEquals(1, histogram.getBuckets().get(3).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } } @@ -140,12 +144,13 @@ public void testMinDocCount() throws Exception { fieldType.setName("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - Histogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertEquals(2, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); assertEquals(0d, histogram.getBuckets().get(1).getKey()); assertEquals(3, histogram.getBuckets().get(1).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } } @@ -168,7 +173,7 @@ public void testMissing() throws Exception { fieldType.setName("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertEquals(4, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); @@ -178,6 +183,7 @@ public void testMissing() throws Exception { assertEquals(2, histogram.getBuckets().get(2).getDocCount()); assertEquals(50d, histogram.getBuckets().get(3).getKey()); assertEquals(1, histogram.getBuckets().get(3).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } } @@ -199,7 +205,7 @@ public void testOffset() throws Exception { fieldType.setName("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = 
search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertEquals(3, histogram.getBuckets().size()); assertEquals(-10 + Math.PI, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); @@ -207,6 +213,7 @@ public void testOffset() throws Exception { assertEquals(2, histogram.getBuckets().get(1).getDocCount()); assertEquals(5 + Math.PI, histogram.getBuckets().get(2).getKey()); assertEquals(1, histogram.getBuckets().get(2).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } } @@ -228,7 +235,7 @@ public void testExtendedBounds() throws Exception { fieldType.setName("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - Histogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertEquals(6, histogram.getBuckets().size()); assertEquals(-15d, histogram.getBuckets().get(0).getKey()); assertEquals(0, histogram.getBuckets().get(0).getDocCount()); @@ -242,6 +249,7 @@ public void testExtendedBounds() throws Exception { assertEquals(0, histogram.getBuckets().get(4).getDocCount()); assertEquals(10d, histogram.getBuckets().get(5).getKey()); assertEquals(0, histogram.getBuckets().get(5).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java index 424c3aed2105d..daaeb94d8fae9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.ValueType; import java.io.IOException; @@ -45,7 +46,10 @@ public void testMatchNoDocs() throws IOException { "field", Queries.newMatchAllQuery(), doc -> doc.add(new SortedNumericDocValuesField("field", randomLong())), - internalMissing -> assertEquals(internalMissing.getDocCount(), 0)); + internalMissing -> { + assertEquals(internalMissing.getDocCount(), 0); + assertFalse(AggregationInspectionHelper.hasValue(internalMissing)); + }); } public void testMatchAllDocs() throws IOException { @@ -54,7 +58,10 @@ public void testMatchAllDocs() throws IOException { "field", Queries.newMatchAllQuery(), doc -> doc.add(new SortedNumericDocValuesField("another_field", randomLong())), - internalMissing -> assertEquals(internalMissing.getDocCount(), numDocs)); + internalMissing -> { + assertEquals(internalMissing.getDocCount(), numDocs); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); + }); } public void testMatchSparse() throws IOException { @@ -74,6 +81,7 @@ public void testMatchSparse() throws IOException { internalMissing -> { assertEquals(internalMissing.getDocCount(), count.get()); count.set(0); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); }); } @@ -87,6 +95,7 @@ public void testMissingField() throws IOException { }, 
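// A note on the MissingAggregatorTests changes here: for a single-bucket aggregation like
// InternalMissing, "has a value" reduces to a doc-count check, which is why the empty case
// above expects hasValue == false exactly when getDocCount() == 0. A minimal sketch of that
// kind of overload (illustrative only, not the helper's literal source):
//
//     public static boolean hasValue(InternalMissing agg) {
//         return agg.getDocCount() > 0;
//     }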
internalMissing -> { assertEquals(internalMissing.getDocCount(), numDocs); + assertTrue(AggregationInspectionHelper.hasValue(internalMissing)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index e9911a7034c2f..1eef8de86b304 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -63,6 +63,7 @@ import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalSum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.test.VersionUtils; @@ -115,16 +116,16 @@ public void testNoDocs() throws IOException { NumberFieldMapper.NumberType.LONG); fieldType.setName(VALUE_FIELD_NAME); - Nested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(NESTED_AGG, nested.getName()); assertEquals(0, nested.getDocCount()); - InternalMax max = (InternalMax) - ((InternalAggregation)nested).getProperty(MAX_AGG_NAME); + InternalMax max = (InternalMax) nested.getProperty(MAX_AGG_NAME); assertEquals(MAX_AGG_NAME, max.getName()); assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), Double.MIN_VALUE); + assertFalse(AggregationInspectionHelper.hasValue(nested)); } } } @@ -162,17 +163,18 @@ public void testSingleNestingMax() throws IOException { NumberFieldMapper.NumberType.LONG); fieldType.setName(VALUE_FIELD_NAME); - Nested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); assertEquals(expectedNestedDocs, nested.getDocCount()); - InternalMax max = (InternalMax) - ((InternalAggregation)nested).getProperty(MAX_AGG_NAME); + InternalMax max = (InternalMax) nested.getProperty(MAX_AGG_NAME); assertEquals(MAX_AGG_NAME, max.getName()); assertEquals(expectedMaxValue, max.getValue(), Double.MIN_VALUE); + + assertTrue(AggregationInspectionHelper.hasValue(nested)); } } } @@ -211,17 +213,18 @@ public void testDoubleNestingMax() throws IOException { NumberFieldMapper.NumberType.LONG); fieldType.setName(VALUE_FIELD_NAME); - Nested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(expectedNestedDocs, nested.getDocCount()); assertEquals(NESTED_AGG, nested.getName()); assertEquals(expectedNestedDocs, nested.getDocCount()); - InternalMax max = (InternalMax) - ((InternalAggregation)nested).getProperty(MAX_AGG_NAME); + InternalMax max = (InternalMax) nested.getProperty(MAX_AGG_NAME); assertEquals(MAX_AGG_NAME, max.getName()); assertEquals(expectedMaxValue, max.getValue(), Double.MIN_VALUE); + + assertTrue(AggregationInspectionHelper.hasValue(nested)); } } } @@ -263,7 +266,7 @@ public void testOrphanedDocs() throws IOException { 
NumberFieldMapper.NumberType.LONG); fieldType.setName(VALUE_FIELD_NAME); - Nested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -348,13 +351,15 @@ public void testResetRootDocId() throws Exception { bq.add(Queries.newNonNestedFilter(VersionUtils.randomVersion(random())), BooleanClause.Occur.MUST); bq.add(new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2"))), BooleanClause.Occur.MUST_NOT); - Nested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = search(newSearcher(indexReader, false, true), new ConstantScoreQuery(bq.build()), nestedBuilder, fieldType); assertEquals(NESTED_AGG, nested.getName()); // The bug manifests if 6 docs are returned, because currentRootDoc isn't reset the previous child docs from the first // segment are emitted as hits. assertEquals(4L, nested.getDocCount()); + + assertTrue(AggregationInspectionHelper.hasValue(nested)); } } } @@ -630,7 +635,7 @@ public void testPreGetChildLeafCollectors() throws IOException { assertEquals("filterAgg", filter.getName()); assertEquals(3L, filter.getDocCount()); - Nested nested = filter.getAggregations().get(NESTED_AGG); + InternalNested nested = filter.getAggregations().get(NESTED_AGG); assertEquals(6L, nested.getDocCount()); StringTerms keyAgg = nested.getAggregations().get("key"); @@ -687,7 +692,7 @@ public void testFieldAlias() throws IOException { NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias")); - Nested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, fieldType); Nested aliasNested = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java index e446dfb3d2b9a..52988764d3bc3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; @@ -75,10 +76,11 @@ public void testSampler() throws IOException { try (IndexReader reader = DirectoryReader.open(w)) { assertEquals("test expects a single segment", 1, reader.leaves().size()); IndexSearcher searcher = new IndexSearcher(reader); - Sampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "good")), aggBuilder, textFieldType, + InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "good")), aggBuilder, textFieldType, numericFieldType); Min min = sampler.getAggregations().get("min"); assertEquals(5.0, min.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(sampler)); } } } diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java index dbff6daed6285..123bb0f60427c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java @@ -37,8 +37,9 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; +import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.Arrays; @@ -88,7 +89,7 @@ public void testSignificance() throws IOException { IndexSearcher searcher = new IndexSearcher(reader); // Search "odd" which should have no duplication - Sampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, textFieldType); + InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aggBuilder, textFieldType); SignificantTerms terms = sampler.getAggregations().get("sig_text"); assertNull(terms.getBucketByKey("even")); @@ -109,6 +110,7 @@ public void testSignificance() throws IOException { assertNotNull(terms.getBucketByKey("even")); + assertTrue(AggregationInspectionHelper.hasValue(sampler)); } } } @@ -142,8 +144,9 @@ public void testFieldAlias() throws IOException { SamplerAggregationBuilder samplerAgg = sampler("sampler").subAggregation(agg); SamplerAggregationBuilder aliasSamplerAgg = sampler("sampler").subAggregation(aliasAgg); - Sampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), samplerAgg, textFieldType); - Sampler aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), aliasSamplerAgg, textFieldType); + InternalSampler sampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), samplerAgg, textFieldType); + InternalSampler aliasSampler = searchAndReduce(searcher, new TermQuery(new Term("text", "odd")), + aliasSamplerAgg, textFieldType); SignificantTerms terms = sampler.getAggregations().get("sig_text"); SignificantTerms aliasTerms = aliasSampler.getAggregations().get("sig_text"); @@ -157,6 +160,9 @@ public void testFieldAlias() throws IOException { aliasTerms = aliasSampler.getAggregations().get("sig_text"); assertFalse(terms.getBuckets().isEmpty()); assertEquals(terms, aliasTerms); + + assertTrue(AggregationInspectionHelper.hasValue(sampler)); + assertTrue(AggregationInspectionHelper.hasValue(aliasSampler)); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index d8948c1061369..8acf78d301af5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -73,6 +73,7 @@ import 
org.elasticsearch.search.aggregations.metrics.InternalTopHits; import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; @@ -199,6 +200,7 @@ public void testSimple() throws Exception { assertEquals(1L, result.getBuckets().get(3).getDocCount()); assertEquals("d", result.getBuckets().get(4).getKeyAsString()); assertEquals(1L, result.getBuckets().get(4).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); } } } @@ -278,6 +280,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(8).getDocCount()); assertEquals("val009", result.getBuckets().get(9).getKeyAsString()); assertEquals(1L, result.getBuckets().get(9).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType(); fieldType2.setName("sv_field"); @@ -304,6 +307,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(3).getDocCount()); assertEquals("val009", result.getBuckets().get(4).getKeyAsString()); assertEquals(1L, result.getBuckets().get(4).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) .executionHint(executionHint) @@ -333,6 +337,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(6).getDocCount()); assertEquals("val009", result.getBuckets().get(7).getKeyAsString()); assertEquals(1L, result.getBuckets().get(7).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) .executionHint(executionHint) @@ -349,6 +354,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("val011", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) .executionHint(executionHint) @@ -365,6 +371,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("val010", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) .executionHint(executionHint) @@ -382,6 +389,7 @@ public void testStringIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals("val010", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); } } } @@ -436,6 +444,7 @@ public void testNumericIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(0).getDocCount()); 
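// The raw (InternalTerms) casts in these assertions select the matching hasValue(...)
// overload: the helper exposes one static overload per concrete Internal* result type,
// while the tests hold `result` behind an interface type. For a bucketed agg the check is
// presumably bucket-based; a sketch under that assumption (not the literal helper source):
//
//     public static boolean hasValue(InternalTerms<?, ?> agg) {
//         return agg.getBuckets().isEmpty() == false;
//     }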
assertEquals(5L, result.getBuckets().get(1).getKey()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.LONG) .executionHint(executionHint) @@ -456,6 +465,7 @@ public void testNumericIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(2).getDocCount()); assertEquals(4L, result.getBuckets().get(3).getKey()); assertEquals(1L, result.getBuckets().get(3).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); fieldType.setName("double_field"); @@ -475,6 +485,7 @@ public void testNumericIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(0).getDocCount()); assertEquals(5.0, result.getBuckets().get(1).getKey()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.DOUBLE) .executionHint(executionHint) @@ -495,6 +506,7 @@ public void testNumericIncludeExclude() throws Exception { assertEquals(1L, result.getBuckets().get(2).getDocCount()); assertEquals(4.0, result.getBuckets().get(3).getKey()); assertEquals(1L, result.getBuckets().get(3).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue((InternalTerms)result)); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java index b83acfcba80ec..3e86571ae45e9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java @@ -35,9 +35,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.AvgAggregator; -import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.Arrays; @@ -52,6 +50,7 @@ public void testNoDocs() throws IOException { // Intentionally not writing any docs }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(avg)); }); } @@ -61,6 +60,7 @@ public void testNoMatchingField() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 3))); }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(avg)); }); } @@ -71,6 +71,7 @@ public void testSomeMatchesSortedNumericDocValues() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("number", 3))); }, avg -> { assertEquals(4, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -81,6 +82,7 @@ public void testSomeMatchesNumericDocValues() throws IOException { iw.addDocument(singleton(new NumericDocValuesField("number", 3))); }, avg -> { assertEquals(4, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -91,6 +93,7 @@ public 
void testQueryFiltering() throws IOException { iw.addDocument(Arrays.asList(new IntPoint("number", 3), new SortedNumericDocValuesField("number", 3))); }, avg -> { assertEquals(2.5, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -101,6 +104,7 @@ public void testQueryFiltersAll() throws IOException { iw.addDocument(Arrays.asList(new IntPoint("number", 3), new SortedNumericDocValuesField("number", 7))); }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(avg)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java index a2789a9ef1648..4f5a8bb1ea484 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.ValueType; import java.io.IOException; @@ -48,6 +49,7 @@ public void testNoDocs() throws IOException { // Intentionally not writing any docs }, card -> { assertEquals(0.0, card.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(card)); }); } @@ -57,6 +59,7 @@ public void testNoMatchingField() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); }, card -> { assertEquals(0.0, card.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(card)); }); } @@ -66,6 +69,7 @@ public void testSomeMatchesSortedNumericDocValues() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("number", 1))); }, card -> { assertEquals(2, card.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(card)); }); } @@ -75,6 +79,7 @@ public void testSomeMatchesNumericDocValues() throws IOException { iw.addDocument(singleton(new NumericDocValuesField("number", 1))); }, card -> { assertEquals(2, card.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(card)); }); } @@ -86,6 +91,7 @@ public void testQueryFiltering() throws IOException { new SortedNumericDocValuesField("number", 1))); }, card -> { assertEquals(1, card.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(card)); }); } @@ -97,6 +103,7 @@ public void testQueryFiltersAll() throws IOException { new SortedNumericDocValuesField("number", 1))); }, card -> { assertEquals(0.0, card.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(card)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java index e65d1269520bc..ca26ba1b20672 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import 
org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.function.Consumer; @@ -55,6 +56,7 @@ public void testEmpty() throws IOException { assertEquals(Double.NaN, stats.getVariance(), 0); assertEquals(Double.NaN, stats.getStdDeviation(), 0); assertEquals(0d, stats.getSumOfSquares(), 0); + assertFalse(AggregationInspectionHelper.hasValue(stats)); } ); } @@ -92,6 +94,7 @@ public void testRandomDoubles() throws IOException { stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), TOLERANCE); assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER, stats.getSigma()), stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER), TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(stats)); } ); } @@ -128,6 +131,7 @@ public void testRandomLongs() throws IOException { stats.getStdDeviationBound(ExtendedStats.Bounds.LOWER), TOLERANCE); assertEquals(expected.stdDevBound(ExtendedStats.Bounds.UPPER, stats.getSigma()), stats.getStdDeviationBound(ExtendedStats.Bounds.UPPER), TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(stats)); } ); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java index b171e7436eee4..562d29416dcd8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.test.geo.RandomGeoGenerator; import static org.elasticsearch.search.aggregations.metrics.InternalGeoBoundsTests.GEOHASH_TOLERANCE; @@ -55,6 +56,7 @@ public void testEmpty() throws Exception { assertTrue(Double.isInfinite(bounds.posRight)); assertTrue(Double.isInfinite(bounds.negLeft)); assertTrue(Double.isInfinite(bounds.negRight)); + assertFalse(AggregationInspectionHelper.hasValue(bounds)); } } } @@ -112,6 +114,7 @@ public void testRandom() throws Exception { assertThat(bounds.posRight, closeTo(posRight, GEOHASH_TOLERANCE)); assertThat(bounds.negRight, closeTo(negRight, GEOHASH_TOLERANCE)); assertThat(bounds.negLeft, closeTo(negLeft, GEOHASH_TOLERANCE)); + assertTrue(AggregationInspectionHelper.hasValue(bounds)); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java index 3865070741258..303ed65f44856 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.test.geo.RandomGeoGenerator; import 
java.io.IOException; @@ -52,6 +53,7 @@ public void testEmpty() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertNull(result.centroid()); + assertFalse(AggregationInspectionHelper.hasValue(result)); } } } @@ -79,6 +81,7 @@ public void testUnmapped() throws Exception { fieldType.setName("field"); result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertNull(result.centroid()); + assertFalse(AggregationInspectionHelper.hasValue(result)); } } } @@ -149,6 +152,7 @@ private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) thro assertNotNull(centroid); assertEquals(expectedCentroid.getLat(), centroid.getLat(), GEOHASH_TOLERANCE); assertEquals(expectedCentroid.getLon(), centroid.getLon(), GEOHASH_TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(result)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java index 52bd6a37e6f6f..9d9c74f283b45 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.aggregations.metrics.PercentileRanks; import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.hamcrest.Matchers; import java.io.IOException; @@ -55,6 +56,7 @@ public void testEmpty() throws IOException { Percentile rank = ranks.iterator().next(); assertEquals(Double.NaN, rank.getPercent(), 0d); assertEquals(0.5, rank.getValue(), 0d); + assertFalse(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks)ranks)); } } @@ -87,6 +89,7 @@ public void testSimple() throws IOException { assertEquals(12, rank.getValue(), 0d); assertThat(rank.getPercent(), Matchers.equalTo(100d)); assertFalse(rankIterator.hasNext()); + assertTrue(AggregationInspectionHelper.hasValue((InternalHDRPercentileRanks)ranks)); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java index b68b68dd544ea..f08a89657c63b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.function.Consumer; @@ -52,6 +53,7 @@ public void testNoDocs() throws IOException { // Intentionally not writing any docs }, hdr -> { assertEquals(0L, hdr.state.getTotalCount()); + assertFalse(AggregationInspectionHelper.hasValue(hdr)); }); } @@ -61,6 +63,7 @@ public void testNoMatchingField() throws 
IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); }, hdr -> { assertEquals(0L, hdr.state.getTotalCount()); + assertFalse(AggregationInspectionHelper.hasValue(hdr)); }); } @@ -77,6 +80,7 @@ public void testSomeMatchesSortedNumericDocValues() throws IOException { assertEquals(20.0d, hdr.percentile(50), approximation); assertEquals(40.0d, hdr.percentile(75), approximation); assertEquals(60.0d, hdr.percentile(99), approximation); + assertTrue(AggregationInspectionHelper.hasValue(hdr)); }); } @@ -93,6 +97,7 @@ public void testSomeMatchesNumericDocValues() throws IOException { assertEquals(20.0d, hdr.percentile(50), approximation); assertEquals(40.0d, hdr.percentile(75), approximation); assertEquals(60.0d, hdr.percentile(99), approximation); + assertTrue(AggregationInspectionHelper.hasValue(hdr)); }); } @@ -107,10 +112,12 @@ public void testQueryFiltering() throws IOException { testCase(LongPoint.newRangeQuery("row", 0, 2), docs, hdr -> { assertEquals(2L, hdr.state.getTotalCount()); assertEquals(10.0d, hdr.percentile(randomDoubleBetween(1, 50, true)), 0.05d); + assertTrue(AggregationInspectionHelper.hasValue(hdr)); }); testCase(LongPoint.newRangeQuery("row", 5, 10), docs, hdr -> { assertEquals(0L, hdr.state.getTotalCount()); + assertFalse(AggregationInspectionHelper.hasValue(hdr)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java index 013f951cc3f2c..d962178661272 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.ArrayList; @@ -69,6 +70,7 @@ public void testNoDocs() throws IOException { // Intentionally not writing any docs }, max -> { assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(max)); }); } @@ -78,6 +80,7 @@ public void testNoMatchingField() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); }, max -> { assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(max)); }); } @@ -87,6 +90,7 @@ public void testSomeMatchesSortedNumericDocValues() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("number", 1))); }, max -> { assertEquals(7, max.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(max)); }); } @@ -96,6 +100,7 @@ public void testSomeMatchesNumericDocValues() throws IOException { iw.addDocument(singleton(new NumericDocValuesField("number", 1))); }, max -> { assertEquals(7, max.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(max)); }); } @@ -105,6 +110,7 @@ public void testQueryFiltering() throws IOException { iw.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1))); }, max -> { assertEquals(1, max.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(max)); }); } @@ -114,6 +120,7 @@ public void testQueryFiltersAll() throws IOException { 
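// testNoDocs() and testNoMatchingField() above pin down the sentinel convention for max:
// an empty InternalMax reports Double.NEGATIVE_INFINITY, which gives the helper an
// unambiguous value to test against. A sketch of that style of check (assumed, not
// necessarily the exact implementation):
//
//     public static boolean hasValue(InternalMax agg) {
//         return agg.getValue() != Double.NEGATIVE_INFINITY;
//     }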
iw.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1))); }, max -> { assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(max)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java index d47d9006b7f38..55cf9b16e1688 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.hamcrest.Description; import org.hamcrest.TypeSafeMatcher; @@ -71,7 +72,10 @@ private static CheckedConsumer<RandomIndexWriter, IOException> public void testNoDocs() throws IOException { - testCase(new MatchAllDocsQuery(), writer -> {}, agg -> assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN))); + testCase(new MatchAllDocsQuery(), writer -> {}, agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)); + assertFalse(AggregationInspectionHelper.hasValue(agg)); + }); } public void testNoMatchingField() throws IOException { @@ -81,7 +85,10 @@ public void testNoMatchingField() throws IOException { writer.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); writer.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 2))); }, - agg -> assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)) + agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)); + assertFalse(AggregationInspectionHelper.hasValue(agg)); + } ); } @@ -94,7 +101,10 @@ public void testSomeMatchesSortedNumericDocValues() throws IOException { sample.add(point); return singleton(new SortedNumericDocValuesField("number", point)); }), - agg -> assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))) + agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); + assertTrue(AggregationInspectionHelper.hasValue(agg)); + } ); } @@ -107,7 +117,10 @@ public void testSomeMatchesNumericDocValues() throws IOException { sample.add(point); return singleton(new NumericDocValuesField("number", point)); }), - agg -> assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))) + agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(sample))); + assertTrue(AggregationInspectionHelper.hasValue(agg)); + } ); } @@ -123,7 +136,10 @@ public void testQueryFiltering() throws IOException { writer.addDocument(Arrays.asList(new IntPoint("number", point), new SortedNumericDocValuesField("number", point))); } }, - agg -> assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(filteredSample))) + agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), closeToRelative(calculateMAD(filteredSample))); + assertTrue(AggregationInspectionHelper.hasValue(agg)); + } ); } @@ -134,7 +150,10 @@ public void testQueryFiltersAll() throws IOException { writer.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1))); writer.addDocument(Arrays.asList(new IntPoint("number", 2), new
SortedNumericDocValuesField("number", 2))); }, - agg -> assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)) + agg -> { + assertThat(agg.getMedianAbsoluteDeviation(), equalTo(Double.NaN)); + assertFalse(AggregationInspectionHelper.hasValue(agg)); + } ); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java index ad897a2ef3264..cfe3c86034f85 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; @@ -116,6 +117,7 @@ public void testMinAggregator_numericDv() throws Exception { aggregator.postCollection(); InternalMin result = (InternalMin) aggregator.buildAggregation(0L); assertEquals(-1.0, result.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(result)); indexReader.close(); directory.close(); @@ -157,6 +159,7 @@ public void testMinAggregator_sortedNumericDv() throws Exception { aggregator.postCollection(); InternalMin result = (InternalMin) aggregator.buildAggregation(0L); assertEquals(-1.0, result.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(result)); indexReader.close(); directory.close(); @@ -189,6 +192,7 @@ public void testMinAggregator_noValue() throws Exception { aggregator.postCollection(); InternalMin result = (InternalMin) aggregator.buildAggregation(0L); assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(result)); indexReader.close(); directory.close(); @@ -212,6 +216,7 @@ public void testMinAggregator_noDocs() throws Exception { aggregator.postCollection(); InternalMin result = (InternalMin) aggregator.buildAggregation(0L); assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(result)); indexReader.close(); directory.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java index 52a45f9c017d1..28b1514545506 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.function.Consumer; @@ -51,6 +52,7 @@ public void testEmpty() throws IOException { assertEquals(Float.NaN, stats.getAvg(), 0); assertEquals(Double.POSITIVE_INFINITY, stats.getMin(), 0); assertEquals(Double.NEGATIVE_INFINITY, stats.getMax(), 0); + 
assertFalse(AggregationInspectionHelper.hasValue(stats)); } ); } @@ -81,6 +83,7 @@ public void testRandomDoubles() throws IOException { assertEquals(expected.min, stats.getMin(), 0); assertEquals(expected.max, stats.getMax(), 0); assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(stats)); } ); } @@ -110,6 +113,7 @@ public void testRandomLongs() throws IOException { assertEquals(expected.min, stats.getMin(), 0); assertEquals(expected.max, stats.getMax(), 0); assertEquals(expected.sum / expected.count, stats.getAvg(), TOLERANCE); + assertTrue(AggregationInspectionHelper.hasValue(stats)); } ); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java index eb57bc9a5115c..ff76aa4d0edef 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.Arrays; @@ -53,14 +54,20 @@ public class SumAggregatorTests extends AggregatorTestCase { public void testNoDocs() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { // Intentionally not writing any docs - }, count -> assertEquals(0L, count.getValue(), 0d)); + }, count -> { + assertEquals(0L, count.getValue(), 0d); + assertFalse(AggregationInspectionHelper.hasValue(count)); + }); } public void testNoMatchingField() throws IOException { testCase(new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new NumericDocValuesField("wrong_number", 7))); iw.addDocument(singleton(new NumericDocValuesField("wrong_number", 1))); - }, count -> assertEquals(0L, count.getValue(), 0d)); + }, count -> { + assertEquals(0L, count.getValue(), 0d); + assertFalse(AggregationInspectionHelper.hasValue(count)); + }); } public void testNumericDocValues() throws IOException { @@ -81,7 +88,10 @@ public void testNumericDocValues() throws IOException { iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 2))); iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 1))); iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 2))); - }, count -> assertEquals(24L, count.getValue(), 0d)); + }, count -> { + assertEquals(24L, count.getValue(), 0d); + assertTrue(AggregationInspectionHelper.hasValue(count)); + }); } public void testSortedNumericDocValues() throws IOException { @@ -91,7 +101,10 @@ public void testSortedNumericDocValues() throws IOException { iw.addDocument(Arrays.asList(new SortedNumericDocValuesField(FIELD_NAME, 3), new SortedNumericDocValuesField(FIELD_NAME, 4))); iw.addDocument(singleton(new SortedNumericDocValuesField(FIELD_NAME, 1))); - }, count -> assertEquals(15L, count.getValue(), 0d)); + }, count -> { + assertEquals(15L, count.getValue(), 0d); + assertTrue(AggregationInspectionHelper.hasValue(count)); + }); } public void testQueryFiltering() throws IOException { @@ -101,14 +114,20 @@ public void testQueryFiltering() throws IOException { iw.addDocument(Arrays.asList(new StringField("match", "yes", Field.Store.NO), new NumericDocValuesField(FIELD_NAME, 3))); 
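// The empty-sum cases earlier in this file (testNoDocs, testNoMatchingField) expect
// getValue() == 0 together with hasValue == false, so the sum overload is evidently
// value-based rather than doc-count-based. A sketch consistent with those tests
// (illustrative only):
//
//     public static boolean hasValue(InternalSum agg) {
//         return agg.getValue() != 0.0;
//     }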
iw.addDocument(Arrays.asList(new StringField("match", "no", Field.Store.NO), new NumericDocValuesField(FIELD_NAME, 4))); iw.addDocument(Arrays.asList(new StringField("match", "yes", Field.Store.NO), new NumericDocValuesField(FIELD_NAME, 5))); - }, count -> assertEquals(9L, count.getValue(), 0d)); + }, count -> { + assertEquals(9L, count.getValue(), 0d); + assertTrue(AggregationInspectionHelper.hasValue(count)); + }); } public void testStringField() throws IOException { IllegalStateException e = expectThrows(IllegalStateException.class, () -> { testCase(new MatchAllDocsQuery(), iw -> { iw.addDocument(singleton(new SortedDocValuesField(FIELD_NAME, new BytesRef("1")))); - }, count -> assertEquals(0L, count.getValue(), 0d)); + }, count -> { + assertEquals(0L, count.getValue(), 0d); + assertFalse(AggregationInspectionHelper.hasValue(count)); + }); }); assertEquals("unexpected docvalues type SORTED for field 'field' (expected one of [SORTED_NUMERIC, NUMERIC]). " + "Re-index with correct docvalues type.", e.getMessage()); @@ -159,13 +178,13 @@ private void verifySummationOfDoubles(double[] values, double expected, double d private void testCase(Query query, CheckedConsumer<RandomIndexWriter, IOException> indexer, - Consumer<Sum> verify) throws IOException { + Consumer<InternalSum> verify) throws IOException { testCase(query, indexer, verify, NumberFieldMapper.NumberType.LONG); } private void testCase(Query query, CheckedConsumer<RandomIndexWriter, IOException> indexer, - Consumer<Sum> verify, + Consumer<InternalSum> verify, NumberFieldMapper.NumberType fieldNumberType) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -187,7 +206,7 @@ private void testCase(Query query, indexSearcher.search(query, aggregator); aggregator.postCollection(); - verify.accept((Sum) aggregator.buildAggregation(0L)); + verify.accept((InternalSum) aggregator.buildAggregation(0L)); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java index 363ba14198390..2541583e94580 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.aggregations.metrics.PercentileRanks; import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.hamcrest.Matchers; import java.io.IOException; @@ -55,6 +56,7 @@ public void testEmpty() throws IOException { Percentile rank = ranks.iterator().next(); assertEquals(Double.NaN, rank.getPercent(), 0d); assertEquals(0.5, rank.getValue(), 0d); + assertFalse(AggregationInspectionHelper.hasValue(((InternalTDigestPercentileRanks)ranks))); } } @@ -91,6 +93,7 @@ public void testSimple() throws IOException { // https://github.com/elastic/elasticsearch/issues/14851 // assertThat(rank.getPercent(), Matchers.equalTo(100d)); assertFalse(rankIterator.hasNext()); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTDigestPercentileRanks)ranks))); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java index 8a4f399cb2525..0b1692fa6133c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; import org.elasticsearch.search.aggregations.metrics.TDigestPercentilesAggregator; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.function.Consumer; @@ -52,6 +53,7 @@ public void testNoDocs() throws IOException { // Intentionally not writing any docs }, tdigest -> { assertEquals(0L, tdigest.state.size()); + assertFalse(AggregationInspectionHelper.hasValue(tdigest)); }); } @@ -61,6 +63,7 @@ public void testNoMatchingField() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); }, tdigest -> { assertEquals(0L, tdigest.state.size()); + assertFalse(AggregationInspectionHelper.hasValue(tdigest)); }); } @@ -82,6 +85,7 @@ public void testSomeMatchesSortedNumericDocValues() throws IOException { assertEquals("2.0", tdigest.percentileAsString(50)); assertEquals(1.0d, tdigest.percentile(22), 0.0d); assertEquals("1.0", tdigest.percentileAsString(22)); + assertTrue(AggregationInspectionHelper.hasValue(tdigest)); }); } @@ -107,6 +111,7 @@ public void testSomeMatchesNumericDocValues() throws IOException { assertEquals("1.0", tdigest.percentileAsString(25)); assertEquals(0.0d, tdigest.percentile(1), 0.0d); assertEquals("0.0", tdigest.percentileAsString(1)); + assertTrue(AggregationInspectionHelper.hasValue(tdigest)); }); } @@ -127,11 +132,13 @@ public void testQueryFiltering() throws IOException { assertEquals(2.0d, tdigest.percentile(100), 0.0d); assertEquals(1.0d, tdigest.percentile(50), 0.0d); assertEquals(0.5d, tdigest.percentile(25), 0.0d); + assertTrue(AggregationInspectionHelper.hasValue(tdigest)); }); testCase(LongPoint.newRangeQuery("row", 100, 110), docs, tdigest -> { assertEquals(0L, tdigest.state.size()); assertEquals(0L, tdigest.state.centroidCount()); + assertFalse(AggregationInspectionHelper.hasValue(tdigest)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 921f29c915f51..585cd7f9ff434 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.TopHits; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; @@ -74,12 +75,14 @@ public void testTopLevel() throws Exception { assertEquals("type", searchHits.getAt(1).getType()); assertEquals("1", searchHits.getAt(2).getId()); assertEquals("type", searchHits.getAt(2).getType()); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits)result))); } public void 
testNoResults() throws Exception { TopHits result = (TopHits) testCase(new MatchNoDocsQuery(), topHits("_name").sort("string", SortOrder.DESC)); SearchHits searchHits = ((TopHits) result).getHits(); assertEquals(0L, searchHits.getTotalHits().value); + assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits)result))); } /** @@ -106,22 +109,26 @@ public void testInsideTerms() throws Exception { assertEquals(2L, searchHits.getTotalHits().value); assertEquals("2", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("a").getAggregations().get("top")))); // The "b" bucket searchHits = ((TopHits) terms.getBucketByKey("b").getAggregations().get("top")).getHits(); assertEquals(2L, searchHits.getTotalHits().value); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("b").getAggregations().get("top")))); // The "c" bucket searchHits = ((TopHits) terms.getBucketByKey("c").getAggregations().get("top")).getHits(); assertEquals(1L, searchHits.getTotalHits().value); assertEquals("2", searchHits.getAt(0).getId()); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("c").getAggregations().get("top")))); // The "d" bucket searchHits = ((TopHits) terms.getBucketByKey("d").getAggregations().get("top")).getHits(); assertEquals(1L, searchHits.getTotalHits().value); assertEquals("3", searchHits.getAt(0).getId()); + assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("d").getAggregations().get("top")))); } private static final MappedFieldType STRING_FIELD_TYPE = new KeywordFieldMapper.KeywordFieldType(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java index f9118e30a6efd..ea14d9ec671b2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java @@ -35,15 +35,13 @@ import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.ValueCount; -import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ValueCountAggregator; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.ValueType; import java.io.IOException; @@ -60,7 +58,10 @@ public void testNoDocs() throws IOException { for (ValueType valueType : ValueType.values()) { testCase(new MatchAllDocsQuery(), valueType, iw -> { // Intentionally not writing any docs - }, count -> assertEquals(0L, count.getValue())); + }, 
count -> { + assertEquals(0L, count.getValue()); + assertFalse(AggregationInspectionHelper.hasValue(count)); + }); } } @@ -68,7 +69,10 @@ public void testNoMatchingField() throws IOException { testCase(new MatchAllDocsQuery(), ValueType.LONG, iw -> { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7))); iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); - }, count -> assertEquals(0L, count.getValue())); + }, count -> { + assertEquals(0L, count.getValue()); + assertFalse(AggregationInspectionHelper.hasValue(count)); + }); } public void testSomeMatchesSortedNumericDocValues() throws IOException { @@ -76,14 +80,20 @@ public void testSomeMatchesSortedNumericDocValues() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7))); iw.addDocument(singleton(new SortedNumericDocValuesField(FIELD_NAME, 7))); iw.addDocument(singleton(new SortedNumericDocValuesField(FIELD_NAME, 1))); - }, count -> assertEquals(2L, count.getValue())); + }, count -> { + assertEquals(2L, count.getValue()); + assertTrue(AggregationInspectionHelper.hasValue(count)); + }); } public void testSomeMatchesNumericDocValues() throws IOException { testCase(new DocValuesFieldExistsQuery(FIELD_NAME), ValueType.NUMBER, iw -> { iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 7))); iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, 1))); - }, count -> assertEquals(2L, count.getValue())); + }, count -> { + assertEquals(2L, count.getValue()); + assertTrue(AggregationInspectionHelper.hasValue(count)); + }); } public void testQueryFiltering() throws IOException { @@ -93,20 +103,26 @@ public void testQueryFiltering() throws IOException { iw.addDocument(Arrays.asList(new IntPoint("level", 3), new SortedDocValuesField(FIELD_NAME, new BytesRef("foo")))); iw.addDocument(Arrays.asList(new IntPoint("level", 5), new SortedDocValuesField(FIELD_NAME, new BytesRef("baz")))); iw.addDocument(Arrays.asList(new IntPoint("level", 7), new SortedDocValuesField(FIELD_NAME, new BytesRef("baz")))); - }, count -> assertEquals(4L, count.getValue())); + }, count -> { + assertEquals(4L, count.getValue()); + assertTrue(AggregationInspectionHelper.hasValue(count)); + }); } public void testQueryFiltersAll() throws IOException { testCase(IntPoint.newRangeQuery("level", -1, 0), ValueType.STRING, iw -> { iw.addDocument(Arrays.asList(new IntPoint("level", 3), new SortedDocValuesField(FIELD_NAME, new BytesRef("foo")))); iw.addDocument(Arrays.asList(new IntPoint("level", 5), new SortedDocValuesField(FIELD_NAME, new BytesRef("baz")))); - }, count -> assertEquals(0L, count.getValue())); + }, count -> { + assertEquals(0L, count.getValue()); + assertFalse(AggregationInspectionHelper.hasValue(count)); + }); } private void testCase(Query query, ValueType valueType, CheckedConsumer indexer, - Consumer verify) throws IOException { + Consumer verify) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -127,7 +143,7 @@ private void testCase(Query query, aggregator.preCollection(); indexSearcher.search(query, aggregator); aggregator.postCollection(); - verify.accept((ValueCount) aggregator.buildAggregation(0L)); + verify.accept((InternalValueCount) aggregator.buildAggregation(0L)); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java index 3836f0cc2ae14..d0027208b104b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.search.aggregations.metrics.InternalWeightedAvg; import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregator; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; import org.joda.time.DateTimeZone; @@ -63,6 +64,7 @@ public void testNoDocs() throws IOException { // Intentionally not writing any docs }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(avg)); }); } @@ -77,6 +79,7 @@ public void testNoMatchingField() throws IOException { iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 3))); }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(avg)); }); } @@ -95,6 +98,7 @@ public void testSomeMatchesSortedNumericDocValuesNoWeight() throws IOException { new SortedNumericDocValuesField("weight_field", 1))); }, avg -> { assertEquals(4, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -115,6 +119,7 @@ public void testSomeMatchesSortedNumericDocValuesWeights() throws IOException { }, avg -> { // (7*2 + 2*3 + 3*3) / (2+3+3) == 3.625 assertEquals(3.625, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -133,6 +138,7 @@ public void testSomeMatchesNumericDocValues() throws IOException { new SortedNumericDocValuesField("weight_field", 1))); }, avg -> { assertEquals(4, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -151,6 +157,7 @@ public void testQueryFiltering() throws IOException { new SortedNumericDocValuesField("weight_field", 1))); }, avg -> { assertEquals(2.5, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -170,6 +177,7 @@ public void testQueryFilteringWeights() throws IOException { }, avg -> { double value = (2.0*3.0 + 3.0*4.0) / (3.0+4.0); assertEquals(value, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -185,6 +193,7 @@ public void testQueryFiltersAll() throws IOException { iw.addDocument(Arrays.asList(new IntPoint("value_field", 3), new SortedNumericDocValuesField("value_field", 7))); }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(avg)); }); } @@ -203,6 +212,7 @@ public void testQueryFiltersAllWeights() throws IOException { new SortedNumericDocValuesField("weight_field", 4))); }, avg -> { assertEquals(Double.NaN, avg.getValue(), 0); + assertFalse(AggregationInspectionHelper.hasValue(avg)); }); } @@ -222,6 +232,7 @@ public void testValueSetMissing() throws IOException { }, avg -> { double value = (2.0*2.0 + 2.0*3.0 + 2.0*4.0) / (2.0+3.0+4.0); assertEquals(value, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -241,6 +252,7 @@ public void testWeightSetMissing() throws IOException { }, avg -> { double value = (2.0*2.0 + 3.0*2.0 + 4.0*2.0) / (2.0+2.0+2.0); assertEquals(value, 
avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } @@ -311,6 +323,7 @@ public void testMultiValues() throws IOException { }, avg -> { double value = (((2.0+3.0)/2.0) + ((3.0+4.0)/2.0) + ((4.0+5.0)/2.0)) / (1.0+1.0+1.0); assertEquals(value, avg.getValue(), 0); + assertTrue(AggregationInspectionHelper.hasValue(avg)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index 37882bcdf5e44..b1eec2b0f48ca 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.search.aggregations.metrics.InternalAvg; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import java.io.IOException; import java.util.Arrays; @@ -93,6 +94,7 @@ public void testSimple() throws IOException { for (Histogram.Bucket bucket : buckets) { sum += ((InternalAvg) (bucket.getAggregations().get("the_avg"))).value(); assertThat(((InternalSimpleValue) (bucket.getAggregations().get("cusum"))).value(), equalTo(sum)); + assertTrue(AggregationInspectionHelper.hasValue(((InternalAvg) (bucket.getAggregations().get("the_avg"))))); } }); } @@ -116,14 +118,37 @@ public void testDerivative() throws IOException { for (int i = 0; i < buckets.size(); i++) { if (i == 0) { assertThat(((InternalSimpleValue)(buckets.get(i).getAggregations().get("cusum"))).value(), equalTo(0.0)); + assertTrue(AggregationInspectionHelper.hasValue(((InternalSimpleValue) (buckets.get(i) + .getAggregations().get("cusum"))))); } else { sum += 1.0; assertThat(((InternalSimpleValue)(buckets.get(i).getAggregations().get("cusum"))).value(), equalTo(sum)); + assertTrue(AggregationInspectionHelper.hasValue(((InternalSimpleValue) (buckets.get(i) + .getAggregations().get("cusum"))))); } } }); } + public void testCount() throws IOException { + Query query = new MatchAllDocsQuery(); + + DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo"); + aggBuilder.dateHistogramInterval(DateHistogramInterval.DAY).field(HISTO_FIELD); + aggBuilder.subAggregation(new CumulativeSumPipelineAggregationBuilder("cusum", "_count")); + + executeTestCase(query, aggBuilder, histogram -> { + assertEquals(10, ((Histogram)histogram).getBuckets().size()); + List buckets = ((Histogram)histogram).getBuckets(); + double sum = 1.0; + for (Histogram.Bucket bucket : buckets) { + assertThat(((InternalSimpleValue) (bucket.getAggregations().get("cusum"))).value(), equalTo(sum)); + assertTrue(AggregationInspectionHelper.hasValue(((InternalSimpleValue) (bucket.getAggregations().get("cusum"))))); + sum += 1.0; + } + }); + } + public void testDocCount() throws IOException { Query query = new MatchAllDocsQuery(); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestMultiValueAggregation.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestMultiValueAggregation.java index f8996966fa1f6..cfa429e91e83d 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestMultiValueAggregation.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestMultiValueAggregation.java @@ -60,4 +60,4 @@ protected int doHashCode() { protected boolean doEquals(Object obj) { throw new UnsupportedOperationException(); } -} \ No newline at end of file +} From eb43ab6d6081ca4f239a0e4c2672658b1f9d9a26 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Tue, 22 Jan 2019 10:57:37 -0700 Subject: [PATCH 02/24] Implement leader rate limiting for file restore (#37677) This is related to #35975. This commit implements rate limiting on the leader side using the CombinedRateLimiter. --- .../repository/CcrRestoreSourceService.java | 19 +++++++++++++-- .../xpack/ccr/CcrRepositoryIT.java | 24 ++++++++++++++++--- 2 files changed, 38 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index 1c7f9f95adbbe..a72b2f21d71df 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -16,8 +16,10 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.CombinedRateLimiter; import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.KeyedLock; @@ -42,6 +44,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Consumer; +import java.util.function.LongConsumer; public class CcrRestoreSourceService extends AbstractLifecycleComponent implements IndexEventListener { @@ -52,6 +55,7 @@ public class CcrRestoreSourceService extends AbstractLifecycleComponent implemen private final CopyOnWriteArrayList> closeSessionListeners = new CopyOnWriteArrayList<>(); private final ThreadPool threadPool; private final CcrSettings ccrSettings; + private final CounterMetric throttleTime = new CounterMetric(); public CcrRestoreSourceService(ThreadPool threadPool, CcrSettings ccrSettings) { this.threadPool = threadPool; @@ -136,7 +140,7 @@ public synchronized SessionReader getSessionReader(String sessionUUID) { throw new IllegalArgumentException("session [" + sessionUUID + "] not found"); } restore.idle = false; - return new SessionReader(restore); + return new SessionReader(restore, ccrSettings, throttleTime::inc); } private void internalCloseSession(String sessionUUID, boolean throwIfSessionMissing) { @@ -182,6 +186,10 @@ private void maybeTimeout(String sessionUUID) { } } + public long getThrottleTime() { + return this.throttleTime.count(); + } + private static class RestoreSession extends AbstractRefCounted { private final String sessionUUID; @@ -254,9 +262,13 @@ protected void closeInternal() { public static class SessionReader implements Closeable { private final RestoreSession restoreSession; + private final CcrSettings 
ccrSettings; + private final LongConsumer throttleListener; - private SessionReader(RestoreSession restoreSession) { + private SessionReader(RestoreSession restoreSession, CcrSettings ccrSettings, LongConsumer throttleListener) { this.restoreSession = restoreSession; + this.ccrSettings = ccrSettings; + this.throttleListener = throttleListener; restoreSession.incRef(); } @@ -270,6 +282,9 @@ private SessionReader(RestoreSession restoreSession) { * @throws IOException if the read fails */ public long readFileBytes(String fileName, BytesReference reference) throws IOException { + CombinedRateLimiter rateLimiter = ccrSettings.getRateLimiter(); + long throttleTime = rateLimiter.maybePause(reference.length()); + throttleListener.accept(throttleTime); return restoreSession.readFileBytes(fileName, reference); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index 47cc1c528fa5c..0a3669734dc6b 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -238,9 +238,15 @@ public void testDocsAreRecovered() throws Exception { } public void testRateLimitingIsEmployed() throws Exception { + boolean followerRateLimiting = randomBoolean(); + ClusterUpdateSettingsRequest settingsRequest = new ClusterUpdateSettingsRequest(); settingsRequest.persistentSettings(Settings.builder().put(CcrSettings.RECOVERY_MAX_BYTES_PER_SECOND.getKey(), "10K")); - assertAcked(followerClient().admin().cluster().updateSettings(settingsRequest).actionGet()); + if (followerRateLimiting) { + assertAcked(followerClient().admin().cluster().updateSettings(settingsRequest).actionGet()); + } else { + assertAcked(leaderClient().admin().cluster().updateSettings(settingsRequest).actionGet()); + } String leaderClusterRepoName = CcrRepository.NAME_PREFIX + "leader_cluster"; String leaderIndex = "index1"; @@ -256,11 +262,15 @@ public void testRateLimitingIsEmployed() throws Exception { final ClusterService clusterService = getFollowerCluster().getCurrentMasterNodeInstance(ClusterService.class); List repositories = new ArrayList<>(); + List restoreSources = new ArrayList<>(); for (RepositoriesService repositoriesService : getFollowerCluster().getDataOrMasterNodeInstances(RepositoriesService.class)) { Repository repository = repositoriesService.repository(leaderClusterRepoName); repositories.add((CcrRepository) repository); } + for (CcrRestoreSourceService restoreSource : getLeaderCluster().getDataOrMasterNodeInstances(CcrRestoreSourceService.class)) { + restoreSources.add(restoreSource); + } logger.info("--> indexing some data"); for (int i = 0; i < 100; i++) { @@ -282,12 +292,20 @@ public void testRateLimitingIsEmployed() throws Exception { restoreService.restoreSnapshot(restoreRequest, waitForRestore(clusterService, future)); future.actionGet(); - assertTrue(repositories.stream().anyMatch(cr -> cr.getRestoreThrottleTimeInNanos() > 0)); + if (followerRateLimiting) { + assertTrue(repositories.stream().anyMatch(cr -> cr.getRestoreThrottleTimeInNanos() > 0)); + } else { + assertTrue(restoreSources.stream().anyMatch(cr -> cr.getThrottleTime() > 0)); + } settingsRequest = new ClusterUpdateSettingsRequest(); ByteSizeValue defaultValue = CcrSettings.RECOVERY_MAX_BYTES_PER_SECOND.getDefault(Settings.EMPTY); 
settingsRequest.persistentSettings(Settings.builder().put(CcrSettings.RECOVERY_MAX_BYTES_PER_SECOND.getKey(), defaultValue)); - assertAcked(followerClient().admin().cluster().updateSettings(settingsRequest).actionGet()); + if (followerRateLimiting) { + assertAcked(followerClient().admin().cluster().updateSettings(settingsRequest).actionGet()); + } else { + assertAcked(leaderClient().admin().cluster().updateSettings(settingsRequest).actionGet()); + } } public void testFollowerMappingIsUpdated() throws IOException { From 940f6ba4c1192e1663168ebce4a42da6f0352d87 Mon Sep 17 00:00:00 2001 From: Brandon Kobel Date: Tue, 22 Jan 2019 12:09:08 -0800 Subject: [PATCH 03/24] Remove kibana_user and kibana_dashboard_only_user index privileges (#37441) * Remove kibana_user and kibana_dashboard_only_user .kibana* index privileges * Removing unused imports --- .../authz/store/ReservedRolesStore.java | 9 ++--- .../authz/store/ReservedRolesStoreTests.java | 27 --------------- .../integration/KibanaUserRoleIntegTests.java | 33 ------------------- 3 files changed, 2 insertions(+), 67 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index f15e078f9a941..dfd276f4ee9f0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -46,9 +46,7 @@ private static Map initializeReservedRoles() { .put("superuser", SUPERUSER_ROLE_DESCRIPTOR) .put("transport_client", new RoleDescriptor("transport_client", new String[] { "transport_client" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) - .put("kibana_user", new RoleDescriptor("kibana_user", null, new RoleDescriptor.IndicesPrivileges[] { - RoleDescriptor.IndicesPrivileges.builder().indices(".kibana*").privileges("manage", "read", "index", "delete") - .build() }, new RoleDescriptor.ApplicationResourcePrivileges[] { + .put("kibana_user", new RoleDescriptor("kibana_user", null, null, new RoleDescriptor.ApplicationResourcePrivileges[] { RoleDescriptor.ApplicationResourcePrivileges.builder() .application("kibana-.kibana").resources("*").privileges("all").build() }, null, null, @@ -97,10 +95,7 @@ private static Map initializeReservedRoles() { .put("kibana_dashboard_only_user", new RoleDescriptor( "kibana_dashboard_only_user", null, - new RoleDescriptor.IndicesPrivileges[] { - RoleDescriptor.IndicesPrivileges.builder() - .indices(".kibana*").privileges("read", "view_index_metadata").build() - }, + null, new RoleDescriptor.ApplicationResourcePrivileges[] { RoleDescriptor.ApplicationResourcePrivileges.builder() .application("kibana-.kibana").resources("*").privileges("read").build() }, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 7ad03846c1d7e..5a567ad13ff80 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -312,20 +312,6 @@ public void testKibanaUserRole() { 
assertThat(kibanaUserRole.indices().allowedIndicesMatcher("indices:foo") .test(randomAlphaOfLengthBetween(8, 24)), is(false)); - Arrays.asList(".kibana", ".kibana-devnull").forEach((index) -> { - logger.info("index name [{}]", index); - assertThat(kibanaUserRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); - assertThat(kibanaUserRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); - - assertThat(kibanaUserRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); - assertThat(kibanaUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); - assertThat(kibanaUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); - assertThat(kibanaUserRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); - assertThat(kibanaUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); - assertThat(kibanaUserRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); - assertThat(kibanaUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); - }); - final String randomApplication = "kibana-" + randomAlphaOfLengthBetween(8, 24); assertThat(kibanaUserRole.application().grants(new ApplicationPrivilege(randomApplication, "app-random", "all"), "*"), is(false)); @@ -569,19 +555,6 @@ public void testKibanaDashboardOnlyUserRole() { assertThat(dashboardsOnlyUserRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); - final String index = ".kibana"; - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); - - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(false)); - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(false)); - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); - - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(index), is(true)); - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); - assertThat(dashboardsOnlyUserRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); - final String randomApplication = "kibana-" + randomAlphaOfLengthBetween(8, 24); assertThat(dashboardsOnlyUserRole.application().grants(new ApplicationPrivilege(randomApplication, "app-random", "all"), "*"), is(false)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index 597314b2a6196..19533542686de 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -5,15 +5,11 @@ */ package org.elasticsearch.integration; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import 
org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -23,11 +19,9 @@ import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import java.util.Locale; import java.util.Map; import static java.util.Collections.singletonMap; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -149,33 +143,6 @@ public void testGetIndex() throws Exception { assertThat(response.getIndices(), arrayContaining(index)); } - public void testCreateIndexDeleteInKibanaIndex() throws Exception { - final String index = randomBoolean()? ".kibana" : ".kibana-" + randomAlphaOfLengthBetween(1, 10).toLowerCase(Locale.ENGLISH); - - if (randomBoolean()) { - CreateIndexResponse createIndexResponse = client().filterWithHeader(singletonMap("Authorization", - UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD))) - .admin().indices().prepareCreate(index).get(); - assertThat(createIndexResponse.isAcknowledged(), is(true)); - } - - IndexResponse response = client() - .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD))) - .prepareIndex() - .setIndex(index) - .setType("dashboard") - .setSource("foo", "bar") - .setRefreshPolicy(IMMEDIATE) - .get(); - assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); - - DeleteResponse deleteResponse = client() - .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD))) - .prepareDelete(index, "dashboard", response.getId()) - .get(); - assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); - } - public void testGetMappings() throws Exception { final String index = "logstash-20-12-2015"; final String type = "event"; From 992bfd2064a4074b8eb278b6354ae68e9fa87fda Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 22 Jan 2019 12:13:01 -0800 Subject: [PATCH 04/24] Remove additional references to 'type' from the _bulk documentation. (#37722) --- docs/reference/docs/bulk.asciidoc | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 0aae2365d965e..16e93ac196c8d 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -57,7 +57,7 @@ newlines. 
Example: [source,js] -------------------------------------------------- $ cat requests -{ "index" : { "_index" : "test", "_type" : "_doc", "_id" : "1" } } +{ "index" : { "_index" : "test", "_id" : "1" } } { "field1" : "value1" } $ curl -s -H "Content-Type: application/x-ndjson" -XPOST localhost:9200/_bulk --data-binary "@requests"; echo {"took":7, "errors": false, "items":[{"index":{"_index":"test","_type":"_doc","_id":"1","_version":1,"result":"created","forced_refresh":false}}]} @@ -72,12 +72,12 @@ example of a correct sequence of bulk commands: [source,js] -------------------------------------------------- POST _bulk -{ "index" : { "_index" : "test", "_type" : "_doc", "_id" : "1" } } +{ "index" : { "_index" : "test", "_id" : "1" } } { "field1" : "value1" } -{ "delete" : { "_index" : "test", "_type" : "_doc", "_id" : "2" } } -{ "create" : { "_index" : "test", "_type" : "_doc", "_id" : "3" } } +{ "delete" : { "_index" : "test", "_id" : "2" } } +{ "create" : { "_index" : "test", "_id" : "3" } } { "field1" : "value3" } -{ "update" : {"_id" : "1", "_type" : "_doc", "_index" : "test"} } +{ "update" : {"_id" : "1", "_index" : "test"} } { "doc" : {"field2" : "value2"} } -------------------------------------------------- // CONSOLE @@ -265,15 +265,15 @@ the options. Example with update actions: [source,js] -------------------------------------------------- POST _bulk -{ "update" : {"_id" : "1", "_type" : "_doc", "_index" : "index1", "retry_on_conflict" : 3} } +{ "update" : {"_id" : "1", "_index" : "index1", "retry_on_conflict" : 3} } { "doc" : {"field" : "value"} } -{ "update" : { "_id" : "0", "_type" : "_doc", "_index" : "index1", "retry_on_conflict" : 3} } +{ "update" : { "_id" : "0", "_index" : "index1", "retry_on_conflict" : 3} } { "script" : { "source": "ctx._source.counter += params.param1", "lang" : "painless", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} -{ "update" : {"_id" : "2", "_type" : "_doc", "_index" : "index1", "retry_on_conflict" : 3} } +{ "update" : {"_id" : "2", "_index" : "index1", "retry_on_conflict" : 3} } { "doc" : {"field" : "value"}, "doc_as_upsert" : true } -{ "update" : {"_id" : "3", "_type" : "_doc", "_index" : "index1", "_source" : true} } +{ "update" : {"_id" : "3", "_index" : "index1", "_source" : true} } { "doc" : {"field" : "value"} } -{ "update" : {"_id" : "4", "_type" : "_doc", "_index" : "index1"} } +{ "update" : {"_id" : "4", "_index" : "index1"} } { "doc" : {"field" : "value"}, "_source": true} -------------------------------------------------- // CONSOLE From c28479819efa201ab141a8c5c5ccd856c5a00116 Mon Sep 17 00:00:00 2001 From: olcbean Date: Tue, 22 Jan 2019 21:36:42 +0100 Subject: [PATCH 05/24] Fix a typo in a warning message in TestFixturesPlugin (#37631) --- .../elasticsearch/gradle/testfixtures/TestFixturesPlugin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index c13bcc02cbe89..32a50fb4b0750 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -96,7 +96,7 @@ public void apply(Project project) { if (dockerComposeSupported(project) == false) { project.getLogger().warn( "Tests for {} require docker-compose at /usr/local/bin/docker-compose or /usr/bin/docker-compose " + - "but none could not be found so these 
will be skipped", project.getPath() + "but none could be found so these will be skipped", project.getPath() ); tasks.withType(getTaskClass("com.carrotsearch.gradle.junit4.RandomizedTestingTask"), task -> task.setEnabled(false) From 942fc13af5985eda8723f9c072194c7e4c0dc5fc Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Tue, 22 Jan 2019 16:49:03 -0500 Subject: [PATCH 06/24] Use plain text instead of latexmath As latexmath is not rendered, using plain text instead Closes #37718 --- docs/reference/query-dsl/script-score-query.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/query-dsl/script-score-query.asciidoc b/docs/reference/query-dsl/script-score-query.asciidoc index dfd06a04523c3..2189319a1fc86 100644 --- a/docs/reference/query-dsl/script-score-query.asciidoc +++ b/docs/reference/query-dsl/script-score-query.asciidoc @@ -53,7 +53,7 @@ rewriting equivalent functions of your own, as these functions try to be the most efficient by using the internal mechanisms. ===== rational -latexmath:[rational(value,k) = value/(k + value)] +`rational(value,k) = value/(k + value)` [source,js] -------------------------------------------------- @@ -64,7 +64,7 @@ latexmath:[rational(value,k) = value/(k + value)] // NOTCONSOLE ===== sigmoid -latexmath:[sigmoid(value, k, a) = value^a/ (k^a + value^a)] +`sigmoid(value, k, a) = value^a/ (k^a + value^a)` [source,js] -------------------------------------------------- From e2e00cd2450058384ce4bbdf16b130824a75f81a Mon Sep 17 00:00:00 2001 From: Andrey Ershov Date: Wed, 23 Jan 2019 07:21:26 +0100 Subject: [PATCH 07/24] Fix MetaStateFormat tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit It's not safe to continue writing state using MetaDataStateFormat after dirty WriteStateException occurred if it's not recovered by successful subsequent state write. We've encountered test failure of testFailRandomlyAndReadAnyState. The test breaks in the following way. There are 3 state paths. And what happens next Successful write at the beginning of the test yields 0 0 0 state files in the directories. 1st write in the loop is unsuccessful, but not dirty - 0 0 0. 2nd write in the loop is not successful and dirty (failure during fsync), however before removing new files we have 1 1 1. But now during deletion, the first deletion fails and we get - 1 0 0. 3rd write in the loop is unsuccessful, but not dirty - so we want to keep old generation, which happens to be the 1st generation, so now we have 1 x x in state folders. Now we assert that we either load 0 or 1 state from the state folders and select only 2rd and 3th folder to emulate disk failures - this results in NPE because there is nothing in these folders. Fortunately, this won’t be a problem in real life, because if there is a dirty exception, we shut down the node and make sure we perform a successful write on the node startup. 
--- .../org/elasticsearch/gateway/GatewayMetaStateTests.java | 6 ++++++ .../org/elasticsearch/gateway/MetaDataStateFormatTests.java | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 7a6b751de74f9..22259b919ec6f 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -412,6 +412,12 @@ public void testAtomicityWithFailures() throws IOException { } catch (WriteStateException e) { if (e.isDirty()) { possibleMetaData.add(metaData); + /* + * If a dirty WriteStateException occurred, it's only safe to proceed if there is a subsequent + * successful write of the metadata and Manifest. We prefer to break here rather than overcomplicate the test logic. + * See also MetaDataStateFormat#testFailRandomlyAndReadAnyState, which does not break. + */ + break; } } } diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 2522452f72774..40f3bd8a01623 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -408,6 +408,12 @@ public void testFailRandomlyAndReadAnyState() throws IOException { Path[] randomPaths = randomSubsetOf(randomIntBetween(1, paths.length), paths).toArray(new Path[0]); DummyState stateOnDisk = format.loadLatestState(logger, NamedXContentRegistry.EMPTY, randomPaths); assertTrue(possibleStates.contains(stateOnDisk)); + if (possibleStates.size() > 1) { + // if there was a WriteStateException we need to overwrite the current state before we continue + newState = writeAndReadStateSuccessfully(format, paths); + possibleStates.clear(); + possibleStates.add(newState); + } } writeAndReadStateSuccessfully(format, paths); From 7c6566e14c22d80db7e91a422c5b3c0df7b10b2d Mon Sep 17 00:00:00 2001 From: Andrey Ershov Date: Wed, 23 Jan 2019 07:22:41 +0100 Subject: [PATCH 08/24] Migrate SpecificMasterNodesIT to Zen2 (#37532) 1. testSimpleOnlyMasterNodeElection - requires cluster bootstrap when the first master node is started. 2. testElectOnlyBetweenMasterNodes - requires cluster bootstrap when the first master node is started and requires adding a voting exclusion before shutting down the first master node, as sketched below. 3. testAliasFilterValidation - requires cluster bootstrap when the first master node is started.
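For point 2, the voting exclusion amounts to the following snippet (taken directly from the diff below):

    client().execute(AddVotingConfigExclusionsAction.INSTANCE,
        new AddVotingConfigExclusionsRequest(new String[]{masterNodeName})).get();
    internalCluster().stopCurrentMasterNode();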
--- .../cluster/SpecificMasterNodesIT.java | 27 ++++++++++++++++--- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/server/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java index 3d945b7a7bb68..aaef1e58fb50e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java @@ -20,6 +20,9 @@ package org.elasticsearch.cluster; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; +import org.elasticsearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; +import org.elasticsearch.cluster.coordination.ClusterBootstrapService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.discovery.MasterNotDiscoveredException; @@ -28,10 +31,12 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.discovery.zen.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -45,10 +50,22 @@ public class SpecificMasterNodesIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(TestZenDiscovery.USE_ZEN2.getKey(), false) // does unsafe things .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1).build(); } + @Override + protected List<Settings> addExtraClusterBootstrapSettings(List<Settings> allNodesSettings) { + // if it's the first master in the cluster, bootstrap the cluster with this node name + Settings settings = allNodesSettings.get(0); + if (internalCluster().numMasterNodes() == 0 && settings.getAsBoolean(Node.NODE_MASTER_SETTING.getKey(), false)) { + return Collections.singletonList(Settings.builder() + .put(settings) + .put(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey(), settings.get(Node.NODE_NAME_SETTING.getKey())) + .build()); + } + return allNodesSettings; + } + public void testSimpleOnlyMasterNodeElection() throws IOException { logger.info("--> start data node / non master node"); internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), true) @@ -89,7 +106,7 @@ public void testSimpleOnlyMasterNodeElection() throws IOException { .execute().actionGet().getState().nodes().getMasterNode().getName(), equalTo(nextMasterEligibleNodeName)); } - public void testElectOnlyBetweenMasterNodes() throws IOException { + public void testElectOnlyBetweenMasterNodes() throws IOException, ExecutionException, InterruptedException { logger.info("--> start data node / non master node"); internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), true) .put(Node.NODE_MASTER_SETTING.getKey(), false).put("discovery.initial_state_timeout", "1s")); @@ -119,6 +136,8 @@ public void testElectOnlyBetweenMasterNodes() throws IOException { .execute().actionGet().getState().nodes().getMasterNode().getName(),
equalTo(masterNodeName)); logger.info("--> closing master node (1)"); + client().execute(AddVotingConfigExclusionsAction.INSTANCE, + new AddVotingConfigExclusionsRequest(new String[]{masterNodeName})).get(); internalCluster().stopCurrentMasterNode(); assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState() .execute().actionGet().getState().nodes().getMasterNode().getName(), equalTo(nextMasterEligableNodeName)); @@ -126,7 +145,7 @@ public void testElectOnlyBetweenMasterNodes() throws IOException { .execute().actionGet().getState().nodes().getMasterNode().getName(), equalTo(nextMasterEligableNodeName)); } - public void testAliasFilterValidation() throws Exception { + public void testAliasFilterValidation() { logger.info("--> start master node / non data"); internalCluster().startNode(Settings.builder() .put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true)); From 534ba1dd349a4baadd6ab8dc63dab0a849733804 Mon Sep 17 00:00:00 2001 From: Andrey Ershov Date: Wed, 23 Jan 2019 07:23:06 +0100 Subject: [PATCH 09/24] Remove LicenseServiceClusterNotRecoveredTests (#37528) While migrating tests from Zen1 to Zen2, we encountered this test. The test is organized as follows: it starts the first cluster node, starts the second cluster node, and checks that the license is active. Interestingly, adding assertLicenseActive(true) between steps 1 and 2 also makes the test pass. assertLicenseActive retrieves the XPackLicenseState from the nodes and checks that the active flag is set. It's set to true even before the cluster is initialized, so this test does not make sense. --- ...icenseServiceClusterNotRecoveredTests.java | 60 ------------------- 1 file changed, 60 deletions(-) delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterNotRecoveredTests.java diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterNotRecoveredTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterNotRecoveredTests.java deleted file mode 100644 index 69710b24bb230..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterNotRecoveredTests.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License.
- */ -package org.elasticsearch.license; - -import org.elasticsearch.analysis.common.CommonAnalysisPlugin; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.discovery.TestZenDiscovery; -import org.elasticsearch.transport.Netty4Plugin; -import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; - -import java.util.Arrays; -import java.util.Collection; - -import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; - -@ESIntegTestCase.ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0, maxNumDataNodes = 0, transportClientRatio = 0, - autoMinMasterNodes = false) -public class LicenseServiceClusterNotRecoveredTests extends AbstractLicensesIntegrationTestCase { - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return nodeSettingsBuilder(nodeOrdinal).build(); - } - - @Override - protected boolean addMockHttpTransport() { - return false; - } - - private Settings.Builder nodeSettingsBuilder(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("node.data", true) - .put(TestZenDiscovery.USE_ZEN2.getKey(), false) // this test is just weird - .put("resource.reload.interval.high", "500ms"); // for license mode file watcher - } - - @Override - protected Collection<Class<? extends Plugin>> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, CommonAnalysisPlugin.class, Netty4Plugin.class); - } - - @Override - protected Collection<Class<? extends Plugin>> transportClientPlugins() { - return nodePlugins(); - } - - public void testClusterNotRecovered() throws Exception { - logger.info("--> start one master out of two [recovery state]"); - internalCluster().startNode(nodeSettingsBuilder(0).put("discovery.zen.minimum_master_nodes", 2).put("node.master", true)); - logger.info("--> start second master out of two [recovered state]"); - internalCluster().startNode(nodeSettingsBuilder(1).put("discovery.zen.minimum_master_nodes", 2).put("node.master", true)); - assertLicenseActive(true); - } -} From 52ba407931093ba86538596639eaf35b2a858d01 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 23 Jan 2019 09:01:58 +0100 Subject: [PATCH 10/24] Expose sequence number and primary terms in search responses (#37639) Users may require the sequence number and primary term to perform optimistic concurrency control operations. Currently, you can get the sequence number via the `docvalues_fields` API, but the primary term is not accessible because it is maintained by the `SeqNoFieldMapper` and the infrastructure can't find it. This commit adds a dedicated sub fetch phase, wired to a new `seq_no_primary_term` parameter, that returns both numbers.
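For example (mirroring the documentation added in this change):

    GET /_search
    {
        "seq_no_primary_term": true,
        "query" : {
            "term" : { "user" : "kimchy" }
        }
    }

Each hit in the response then carries the `_seq_no` and `_primary_term` of its last modification.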
--- .../metrics/tophits-aggregation.asciidoc | 1 + .../docs/concurrency-control.asciidoc | 2 +- docs/reference/search/request-body.asciidoc | 2 +- .../search/request/inner-hits.asciidoc | 1 + .../request/version-and-seq-no.asciidoc | 34 +++++++++ .../reference/search/request/version.asciidoc | 16 ---- .../join/query/HasChildQueryBuilderTests.java | 1 + .../elasticsearch/join/query/InnerHitsIT.java | 22 +++++- .../rest-api-spec/test/11_parent_child.yml | 55 +++++++++----- .../remote/RemoteRequestBuildersTests.java | 2 +- .../test/multi_cluster/10_basic.yml | 3 + .../resources/rest-api-spec/api/search.json | 4 + .../200_top_hits_metric.yml | 44 +++++++++-- .../test/search/110_field_collapsing.yml | 54 ++++++++++++++ .../test/search/300_sequence_numbers.yml | 74 +++++++++++++++++++ .../action/search/ExpandSearchPhase.java | 1 + .../index/query/InnerHitBuilder.java | 24 +++++- .../index/query/InnerHitContextBuilder.java | 1 + .../index/query/NestedQueryBuilder.java | 8 ++ .../rest/action/search/RestSearchAction.java | 3 + .../search/DefaultSearchContext.java | 11 +++ .../org/elasticsearch/search/SearchHit.java | 54 +++++++++++++- .../elasticsearch/search/SearchModule.java | 2 + .../elasticsearch/search/SearchService.java | 5 ++ .../metrics/TopHitsAggregationBuilder.java | 39 +++++++++- .../metrics/TopHitsAggregatorFactory.java | 7 +- .../search/builder/SearchSourceBuilder.java | 38 +++++++++- .../SeqNoPrimaryTermFetchSubPhase.java | 69 +++++++++++++++++ .../internal/FilteredSearchContext.java | 10 +++ .../search/internal/SearchContext.java | 8 +- .../search/internal/SubSearchContext.java | 11 +++ .../action/search/ExpandSearchPhaseTests.java | 4 +- .../index/query/InnerHitBuilderTests.java | 7 ++ .../index/query/NestedQueryBuilderTests.java | 16 ++-- .../elasticsearch/search/SearchHitTests.java | 5 ++ .../aggregations/metrics/TopHitsIT.java | 12 +++ .../aggregations/metrics/TopHitsTests.java | 3 + .../search/RandomSearchRequestGenerator.java | 3 + .../elasticsearch/test/TestSearchContext.java | 10 +++ 39 files changed, 603 insertions(+), 63 deletions(-) create mode 100644 docs/reference/search/request/version-and-seq-no.asciidoc delete mode 100644 docs/reference/search/request/version.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/300_sequence_numbers.yml create mode 100644 server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermFetchSubPhase.java diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc index c3b6a9bad4cfc..1ea38fec9657b 100644 --- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc @@ -25,6 +25,7 @@ The top_hits aggregation returns regular search hits, because of this many per h * <> * <> * <> +* <> ==== Example diff --git a/docs/reference/docs/concurrency-control.asciidoc b/docs/reference/docs/concurrency-control.asciidoc index e695e6b5127c9..780a9c7cf76fc 100644 --- a/docs/reference/docs/concurrency-control.asciidoc +++ b/docs/reference/docs/concurrency-control.asciidoc @@ -87,7 +87,7 @@ returns: Note: The <> can return the `_seq_no` and `_primary_term` -for each search hit by requesting the `_seq_no` and `_primary_term` <>. +for each search hit by setting <>. The sequence number and the primary term uniquely identify a change. 
By noting down the sequence number and primary term returned, you can make sure to only change the diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index 9970c4cc6223f..dac7622aab8ed 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -213,7 +213,7 @@ include::request/preference.asciidoc[] include::request/explain.asciidoc[] -include::request/version.asciidoc[] +include::request/version-and-seq-no.asciidoc[] include::request/index-boost.asciidoc[] diff --git a/docs/reference/search/request/inner-hits.asciidoc b/docs/reference/search/request/inner-hits.asciidoc index bcd2c297e5da3..b287b1609703e 100644 --- a/docs/reference/search/request/inner-hits.asciidoc +++ b/docs/reference/search/request/inner-hits.asciidoc @@ -76,6 +76,7 @@ Inner hits also supports the following per document features: * <> * <> * <> +* <> [[nested-inner-hits]] ==== Nested inner hits diff --git a/docs/reference/search/request/version-and-seq-no.asciidoc b/docs/reference/search/request/version-and-seq-no.asciidoc new file mode 100644 index 0000000000000..2bca4c985b290 --- /dev/null +++ b/docs/reference/search/request/version-and-seq-no.asciidoc @@ -0,0 +1,34 @@ +[[search-request-seq-no-primary-term]] +=== Sequence Numbers and Primary Term + +Returns the sequence number and primary term of the last modification to each search hit. +See <> for more details. + +[source,js] +-------------------------------------------------- +GET /_search +{ + "seq_no_primary_term": true, + "query" : { + "term" : { "user" : "kimchy" } + } +} +-------------------------------------------------- +// CONSOLE + +[[search-request-version]] +=== Version + +Returns a version for each search hit. + +[source,js] +-------------------------------------------------- +GET /_search +{ + "version": true, + "query" : { + "term" : { "user" : "kimchy" } + } +} +-------------------------------------------------- +// CONSOLE diff --git a/docs/reference/search/request/version.asciidoc b/docs/reference/search/request/version.asciidoc deleted file mode 100644 index 57c6ce27feb91..0000000000000 --- a/docs/reference/search/request/version.asciidoc +++ /dev/null @@ -1,16 +0,0 @@ -[[search-request-version]] -=== Version - -Returns a version for each search hit. 
- -[source,js] --------------------------------------------------- -GET /_search -{ - "version": true, - "query" : { - "term" : { "user" : "kimchy" } - } -} --------------------------------------------------- -// CONSOLE diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index 6e4e79d16e5a5..eea01d61386de 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -252,6 +252,7 @@ public void testFromJson() throws IOException { " \"from\" : 0,\n" + " \"size\" : 100,\n" + " \"version\" : false,\n" + + " \"seq_no_primary_term\" : false,\n" + " \"explain\" : false,\n" + " \"track_scores\" : false,\n" + " \"sort\" : [ {\n" + diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java index 7a8d2cd9dbc21..89929985ea594 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/InnerHitsIT.java @@ -56,6 +56,8 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -66,6 +68,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -133,9 +136,10 @@ public void testSimpleParentChild() throws Exception { assertThat(innerHits.getAt(1).getId(), equalTo("c2")); assertThat(innerHits.getAt(1).getType(), equalTo("doc")); + final boolean seqNoAndTerm = randomBoolean(); response = client().prepareSearch("articles") .setQuery(hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None) - .innerHit(new InnerHitBuilder())) + .innerHit(new InnerHitBuilder().setSeqNoAndPrimaryTerm(seqNoAndTerm))) .get(); assertNoFailures(response); assertHitCount(response, 1); @@ -152,6 +156,22 @@ public void testSimpleParentChild() throws Exception { assertThat(innerHits.getAt(2).getId(), equalTo("c6")); assertThat(innerHits.getAt(2).getType(), equalTo("doc")); + if (seqNoAndTerm) { + assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(1L)); + assertThat(innerHits.getAt(1).getPrimaryTerm(), equalTo(1L)); + assertThat(innerHits.getAt(2).getPrimaryTerm(), equalTo(1L)); + assertThat(innerHits.getAt(0).getSeqNo(), greaterThanOrEqualTo(0L)); + assertThat(innerHits.getAt(1).getSeqNo(), greaterThanOrEqualTo(0L)); + assertThat(innerHits.getAt(2).getSeqNo(), greaterThanOrEqualTo(0L)); + } else { + assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM)); + 
assertThat(innerHits.getAt(1).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM)); + assertThat(innerHits.getAt(2).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM)); + assertThat(innerHits.getAt(0).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO)); + assertThat(innerHits.getAt(1).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO)); + assertThat(innerHits.getAt(2).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO)); + } + response = client().prepareSearch("articles") .setQuery( hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit( diff --git a/modules/parent-join/src/test/resources/rest-api-spec/test/11_parent_child.yml b/modules/parent-join/src/test/resources/rest-api-spec/test/11_parent_child.yml index 7273ac6a95d5e..61af4ab1acb59 100644 --- a/modules/parent-join/src/test/resources/rest-api-spec/test/11_parent_child.yml +++ b/modules/parent-join/src/test/resources/rest-api-spec/test/11_parent_child.yml @@ -11,26 +11,26 @@ setup: relations: parent: child ---- -"Parent/child inner hits": - - do: - index: - index: test - type: doc - id: 1 - body: {"foo": "bar", "join_field": {"name" : "parent"} } + - do: + index: + index: test + type: doc + id: 1 + body: {"foo": "bar", "join_field": {"name" : "parent"} } - - do: - index: - index: test - type: doc - id: 2 - routing: 1 - body: {"bar": "baz", "join_field": { "name" : "child", "parent": "1"} } + - do: + index: + index: test + type: doc + id: 2 + routing: 1 + body: {"bar": "baz", "join_field": { "name" : "child", "parent": "1"} } - - do: - indices.refresh: {} + - do: + indices.refresh: {} +--- +"Parent/child inner hits": - do: search: rest_total_hits_as_int: true @@ -41,3 +41,24 @@ setup: - match: { hits.hits.0.inner_hits.child.hits.hits.0._index: "test"} - match: { hits.hits.0.inner_hits.child.hits.hits.0._id: "2" } - is_false: hits.hits.0.inner_hits.child.hits.hits.0._nested + +--- +"Parent/child inner hits with seq no": + - skip: + version: " - 6.99.99" + reason: support was added in 7.0 + + - do: + search: + rest_total_hits_as_int: true + body: { "query" : { "has_child" : + { "type" : "child", "query" : { "match_all" : {} }, "inner_hits" : { "seq_no_primary_term": true} } + } } + - match: { hits.total: 1 } + - match: { hits.hits.0._index: "test" } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0.inner_hits.child.hits.hits.0._index: "test"} + - match: { hits.hits.0.inner_hits.child.hits.hits.0._id: "2" } + - is_false: hits.hits.0.inner_hits.child.hits.hits.0._nested + - gte: { hits.hits.0.inner_hits.child.hits.hits.0._seq_no: 0 } + - gte: { hits.hits.0.inner_hits.child.hits.hits.0._primary_term: 1 } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java index 2f801811327b8..0f985fd37016a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -179,7 +179,7 @@ public void testInitialSearchParamsMisc() { fetchVersion = randomBoolean(); searchRequest.source().version(fetchVersion); } - + Map params = initialSearch(searchRequest, query, remoteVersion).getParameters(); if (scroll == null) { diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml index 70afa9bf917e8..5acf84139bbf4 
100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml @@ -30,6 +30,7 @@ rest_total_hits_as_int: true index: test_index,my_remote_cluster:test_index body: + seq_no_primary_term: true aggs: cluster: terms: @@ -37,6 +38,8 @@ - match: { _shards.total: 5 } - match: { hits.total: 11 } + - gte: { hits.hits.0._seq_no: 0 } + - gte: { hits.hits.0._primary_term: 1 } - length: { aggregations.cluster.buckets: 2 } - match: { aggregations.cluster.buckets.0.key: "remote_cluster" } - match: { aggregations.cluster.buckets.0.doc_count: 6 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index 5834ca623a99b..9ac02b1214a2f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -164,6 +164,10 @@ "type" : "boolean", "description" : "Specify whether to return document version as part of a hit" }, + "seq_no_primary_term": { + "type" : "boolean", + "description" : "Specify whether to return sequence number and primary term of the last modification of each hit" + }, "request_cache": { "type" : "boolean", "description" : "Specify if request cache should be used for this request or not, defaults to index level setting" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml index 267a9e85d1d5d..775475e01a597 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml @@ -1,8 +1,4 @@ ---- -"top_hits aggregation with nested documents": - - skip: - version: " - 6.1.99" - reason: "<= 6.1 nodes don't always include index or id in nested top hits" +setup: - do: indices.create: index: my-index @@ -54,6 +50,12 @@ ] } +--- +"top_hits aggregation with nested documents": + - skip: + version: " - 6.1.99" + reason: "<= 6.1 nodes don't always include index or id in nested top hits" + - do: search: rest_total_hits_as_int: true @@ -81,3 +83,35 @@ - match: { aggregations.to-users.users.hits.hits.2._index: my-index } - match: { aggregations.to-users.users.hits.hits.2._nested.field: users } - match: { aggregations.to-users.users.hits.hits.2._nested.offset: 1 } + + +--- +"top_hits aggregation with sequence numbers": + - skip: + version: " - 6.99.99" + reason: support was added in 7.0 + + - do: + search: + rest_total_hits_as_int: true + body: + aggs: + groups: + terms: + field: group.keyword + aggs: + users: + top_hits: + sort: "users.last.keyword" + seq_no_primary_term: true + + - match: { hits.total: 2 } + - length: { aggregations.groups.buckets.0.users.hits.hits: 2 } + - match: { aggregations.groups.buckets.0.users.hits.hits.0._id: "1" } + - match: { aggregations.groups.buckets.0.users.hits.hits.0._index: my-index } + - gte: { aggregations.groups.buckets.0.users.hits.hits.0._seq_no: 0 } + - gte: { aggregations.groups.buckets.0.users.hits.hits.0._primary_term: 1 } + - match: { aggregations.groups.buckets.0.users.hits.hits.1._id: "2" } + - match: { aggregations.groups.buckets.0.users.hits.hits.1._index: my-index } + - gte: { aggregations.groups.buckets.0.users.hits.hits.1._seq_no: 0 } + - gte: { 
aggregations.groups.buckets.0.users.hits.hits.1._primary_term: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml index 7ba1d9b62fbc7..a85abb4e6e28b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml @@ -405,3 +405,57 @@ setup: - match: { hits.hits.1.inner_hits.sub_hits.hits.total: 3} - match: { hits.hits.2.fields.group_alias: [25] } - match: { hits.hits.2.inner_hits.sub_hits.hits.total: 2} + +--- +"field collapsing, inner_hits and seq_no": + + - skip: + version: " - 6.99.0" + reason: "sequence numbers introduced in 7.0.0" + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + collapse: { field: numeric_group, inner_hits: { + name: sub_hits, seq_no_primary_term: true, size: 2, sort: [{ sort: asc }] + } } + sort: [{ sort: desc }] + + - match: { hits.total: 6 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._index: test } + - match: { hits.hits.0.fields.numeric_group: [3] } + - match: { hits.hits.0.sort: [36] } + - match: { hits.hits.0._id: "6" } + - match: { hits.hits.0.inner_hits.sub_hits.hits.total: 1 } + - length: { hits.hits.0.inner_hits.sub_hits.hits.hits: 1 } + - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "6" } + - gte: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._seq_no: 0 } + - gte: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._primary_term: 1 } + - match: { hits.hits.1._index: test } + - match: { hits.hits.1.fields.numeric_group: [1] } + - match: { hits.hits.1.sort: [24] } + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.1.inner_hits.sub_hits.hits.total: 3 } + - length: { hits.hits.1.inner_hits.sub_hits.hits.hits: 2 } + - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "2" } + - gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._seq_no: 0 } + - gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._primary_term: 1 } + - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "1" } + - gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._seq_no: 0 } + - gte: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._primary_term: 1 } + - match: { hits.hits.2._index: test } + - match: { hits.hits.2._type: test } + - match: { hits.hits.2.fields.numeric_group: [25] } + - match: { hits.hits.2.sort: [10] } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2.inner_hits.sub_hits.hits.total: 2 } + - length: { hits.hits.2.inner_hits.sub_hits.hits.hits: 2 } + - match: { hits.hits.2.inner_hits.sub_hits.hits.hits.0._id: "5" } + - gte: { hits.hits.2.inner_hits.sub_hits.hits.hits.0._seq_no: 0 } + - gte: { hits.hits.2.inner_hits.sub_hits.hits.hits.0._primary_term: 1 } + - match: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._id: "4" } + - gte: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._seq_no: 0 } + - gte: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._primary_term: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/300_sequence_numbers.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/300_sequence_numbers.yml new file mode 100644 index 0000000000000..9e838d1c58f77 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/300_sequence_numbers.yml @@ -0,0 +1,74 @@ +setup: + - do: + indices.create: + index: test_1 + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: foo } + +## we index 
again in order to make the seq# 1 (so we can check for the field existence with is_false) + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - do: + indices.refresh: + index: [test_1] + +--- +"sequence numbers are returned if requested from body": + - skip: + version: " - 6.99.99" + reason: sequence numbers were added in 7.0.0 + + - do: + search: + index: _all + body: + query: + match: + foo: bar + seq_no_primary_term: true + + - match: {hits.total.value: 1} + - match: {hits.hits.0._seq_no: 1} + - gte: {hits.hits.0._primary_term: 1} + +--- +"sequence numbers are returned if requested from url": + - skip: + version: " - 6.99.99" + reason: sequence numbers were added in 7.0.0 + + - do: + search: + index: _all + body: + query: + match: + foo: bar + seq_no_primary_term: true + + - match: {hits.total.value: 1} + - match: {hits.hits.0._seq_no: 1} + - gte: {hits.hits.0._primary_term: 1} + +--- +"sequence numbers are not returned if not requested": + - do: + search: + index: _all + body: + query: + match: + foo: bar + + - is_false: hits.hits.0._seq_no + - is_false: hits.hits.0._primary_term diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index da481b7a4a8ee..10a85b723166c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -153,6 +153,7 @@ private SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilder optio groupSource.explain(options.isExplain()); groupSource.trackScores(options.isTrackScores()); groupSource.version(options.isVersion()); + groupSource.seqNoAndPrimaryTerm(options.isSeqNoAndPrimaryTerm()); if (innerCollapseBuilder != null) { groupSource.collapse(innerCollapseBuilder); } diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 733875075b051..f5be9650b8d5c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -68,6 +68,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { PARSER.declareInt(InnerHitBuilder::setSize, SearchSourceBuilder.SIZE_FIELD); PARSER.declareBoolean(InnerHitBuilder::setExplain, SearchSourceBuilder.EXPLAIN_FIELD); PARSER.declareBoolean(InnerHitBuilder::setVersion, SearchSourceBuilder.VERSION_FIELD); + PARSER.declareBoolean(InnerHitBuilder::setSeqNoAndPrimaryTerm, SearchSourceBuilder.SEQ_NO_PRIMARY_TERM_FIELD); PARSER.declareBoolean(InnerHitBuilder::setTrackScores, SearchSourceBuilder.TRACK_SCORES_FIELD); PARSER.declareStringArray(InnerHitBuilder::setStoredFieldNames, SearchSourceBuilder.STORED_FIELDS_FIELD); PARSER.declareObjectArray(InnerHitBuilder::setDocValueFields, @@ -117,7 +118,6 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { }, COLLAPSE_FIELD, ObjectParser.ValueType.OBJECT); } - private String name; private boolean ignoreUnmapped; @@ -125,6 +125,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { private int size = 3; private boolean explain; private boolean version; + private boolean seqNoAndPrimaryTerm; private boolean trackScores; private StoredFieldsContext storedFieldsContext; @@ -155,6 +156,11 @@ public InnerHitBuilder(StreamInput in) throws IOException { size = 
in.readVInt(); explain = in.readBoolean(); version = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)){ + seqNoAndPrimaryTerm = in.readBoolean(); + } else { + seqNoAndPrimaryTerm = false; + } trackScores = in.readBoolean(); storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new); if (in.getVersion().before(Version.V_6_4_0)) { @@ -199,6 +205,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(size); out.writeBoolean(explain); out.writeBoolean(version); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeBoolean(seqNoAndPrimaryTerm); + } out.writeBoolean(trackScores); out.writeOptionalWriteable(storedFieldsContext); if (out.getVersion().before(Version.V_6_4_0)) { @@ -299,6 +308,15 @@ public InnerHitBuilder setVersion(boolean version) { return this; } + public boolean isSeqNoAndPrimaryTerm() { + return seqNoAndPrimaryTerm; + } + + public InnerHitBuilder setSeqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) { + this.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm; + return this; + } + public boolean isTrackScores() { return trackScores; } @@ -436,6 +454,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(SearchSourceBuilder.FROM_FIELD.getPreferredName(), from); builder.field(SearchSourceBuilder.SIZE_FIELD.getPreferredName(), size); builder.field(SearchSourceBuilder.VERSION_FIELD.getPreferredName(), version); + builder.field(SearchSourceBuilder.SEQ_NO_PRIMARY_TERM_FIELD.getPreferredName(), seqNoAndPrimaryTerm); builder.field(SearchSourceBuilder.EXPLAIN_FIELD.getPreferredName(), explain); builder.field(SearchSourceBuilder.TRACK_SCORES_FIELD.getPreferredName(), trackScores); if (fetchSourceContext != null) { @@ -494,6 +513,7 @@ public boolean equals(Object o) { Objects.equals(size, that.size) && Objects.equals(explain, that.explain) && Objects.equals(version, that.version) && + Objects.equals(seqNoAndPrimaryTerm, that.seqNoAndPrimaryTerm) && Objects.equals(trackScores, that.trackScores) && Objects.equals(storedFieldsContext, that.storedFieldsContext) && Objects.equals(docValueFields, that.docValueFields) && @@ -506,7 +526,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(name, ignoreUnmapped, from, size, explain, version, trackScores, + return Objects.hash(name, ignoreUnmapped, from, size, explain, version, seqNoAndPrimaryTerm, trackScores, storedFieldsContext, docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, innerCollapseBuilder); } diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java index 1fe781f38ce27..9e6a766aed891 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java @@ -78,6 +78,7 @@ protected void setupInnerHitsContext(QueryShardContext queryShardContext, innerHitsContext.size(innerHitBuilder.getSize()); innerHitsContext.explain(innerHitBuilder.isExplain()); innerHitsContext.version(innerHitBuilder.isVersion()); + innerHitsContext.seqNoAndPrimaryTerm(innerHitBuilder.isSeqNoAndPrimaryTerm()); innerHitsContext.trackScores(innerHitBuilder.isTrackScores()); if (innerHitBuilder.getStoredFieldsContext() != null) { innerHitsContext.storedFieldsContext(innerHitBuilder.getStoredFieldsContext()); diff --git 
a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index d2b432e7c7ca1..3c3856e208f04 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -368,6 +368,14 @@ static final class NestedInnerHitSubContext extends InnerHitsContext.InnerHitSub this.childObjectMapper = childObjectMapper; } + @Override + public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) { + assert seqNoAndPrimaryTerm() == false; + if (seqNoAndPrimaryTerm) { + throw new UnsupportedOperationException("nested documents are not assigned sequence numbers"); + } + } + @Override public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException { Weight innerHitQueryWeight = createInnerHitQueryWeight(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 3e3a1e02a174b..da773efed580d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -201,6 +201,9 @@ private static void parseSearchSource(final SearchSourceBuilder searchSourceBuil if (request.hasParam("version")) { searchSourceBuilder.version(request.paramAsBoolean("version", null)); } + if (request.hasParam("seq_no_primary_term")) { + searchSourceBuilder.seqNoAndPrimaryTerm(request.paramAsBoolean("seq_no_primary_term", null)); + } if (request.hasParam("timeout")) { searchSourceBuilder.timeout(request.paramAsTime("timeout", null)); } diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 590c58b1f6615..4b82e23e42061 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -107,6 +107,7 @@ final class DefaultSearchContext extends SearchContext { private ScrollContext scrollContext; private boolean explain; private boolean version = false; // by default, we don't return versions + private boolean seqAndPrimaryTerm = false; private StoredFieldsContext storedFields; private ScriptFieldsContext scriptFields; private FetchSourceContext fetchSourceContext; @@ -719,6 +720,16 @@ public void version(boolean version) { this.version = version; } + @Override + public boolean seqNoAndPrimaryTerm() { + return seqAndPrimaryTerm; + } + + @Override + public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) { + this.seqAndPrimaryTerm = seqNoAndPrimaryTerm; + } + @Override public int[] docIdsToLoad() { return docIdsToLoad; diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index cb34804981f1b..42f96e52fb119 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -46,6 +47,7 @@ import org.elasticsearch.index.mapper.IgnoredFieldMapper; import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.lookup.SourceLookup; @@ -91,6 +93,8 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable, parser.declareField((map, value) -> map.put(Fields._SCORE, value), SearchHit::parseScore, new ParseField(Fields._SCORE), ValueType.FLOAT_OR_NULL); parser.declareLong((map, value) -> map.put(Fields._VERSION, value), new ParseField(Fields._VERSION)); + parser.declareLong((map, value) -> map.put(Fields._SEQ_NO, value), new ParseField(Fields._SEQ_NO)); + parser.declareLong((map, value) -> map.put(Fields._PRIMARY_TERM, value), new ParseField(Fields._PRIMARY_TERM)); parser.declareField((map, value) -> map.put(Fields._SHARD, value), (p, c) -> ShardId.fromString(p.text()), new ParseField(Fields._SHARD), ValueType.STRING); parser.declareObject((map, value) -> map.put(SourceFieldMapper.NAME, value), (p, c) -> parseSourceBytes(p), @@ -588,6 +626,8 @@ public static SearchHit createFromMap(Map values) { } searchHit.score(get(Fields._SCORE, values, DEFAULT_SCORE)); searchHit.version(get(Fields._VERSION, values, -1L)); + searchHit.setSeqNo(get(Fields._SEQ_NO, values, SequenceNumbers.UNASSIGNED_SEQ_NO)); + searchHit.setPrimaryTerm(get(Fields._PRIMARY_TERM, values, SequenceNumbers.UNASSIGNED_PRIMARY_TERM)); searchHit.sortValues(get(Fields.SORT, values, SearchSortValues.EMPTY)); searchHit.highlightFields(get(Fields.HIGHLIGHT, values, null)); searchHit.sourceRef(get(SourceFieldMapper.NAME, values, null)); @@ -744,6 +784,10 @@ public void readFrom(StreamInput in) throws IOException { type = in.readOptionalText(); nestedIdentity = in.readOptionalWriteable(NestedIdentity::new); version = in.readLong(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + seqNo = in.readZLong(); + primaryTerm = in.readVLong(); + } source = in.readBytesReference(); if (source.length() == 0) { source = null; @@ -812,6 +856,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalText(type); out.writeOptionalWriteable(nestedIdentity); out.writeLong(version); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeZLong(seqNo); + out.writeVLong(primaryTerm); + } out.writeBytesReference(source); if (explanation == null) { out.writeBoolean(false); @@ -867,6 +915,8 @@ public boolean equals(Object obj) { && Objects.equals(type, other.type) && Objects.equals(nestedIdentity, other.nestedIdentity) && Objects.equals(version, other.version) + && Objects.equals(seqNo, other.seqNo) + && Objects.equals(primaryTerm, other.primaryTerm) && Objects.equals(source, other.source) && Objects.equals(fields, other.fields) && Objects.equals(getHighlightFields(), other.getHighlightFields()) @@ -880,8 +930,8 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(id, type, nestedIdentity, version, source, fields, getHighlightFields(), Arrays.hashCode(matchedQueries), - explanation, shard, innerHits, index, clusterAlias); + return Objects.hash(id, type, nestedIdentity, version, seqNo, primaryTerm, source, fields, getHighlightFields(), + Arrays.hashCode(matchedQueries), explanation, shard, innerHits, index, clusterAlias); } /** diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java 
index 2531685b94557..e75271c2885ae 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -240,6 +240,7 @@ import org.elasticsearch.search.fetch.subphase.MatchedQueriesFetchSubPhase; import org.elasticsearch.search.fetch.subphase.ScoreFetchSubPhase; import org.elasticsearch.search.fetch.subphase.ScriptFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.SeqNoPrimaryTermFetchSubPhase; import org.elasticsearch.search.fetch.subphase.VersionFetchSubPhase; import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase; @@ -727,6 +728,7 @@ private void registerFetchSubPhases(List plugins) { registerFetchSubPhase(new ScriptFieldsFetchSubPhase()); registerFetchSubPhase(new FetchSourceSubPhase()); registerFetchSubPhase(new VersionFetchSubPhase()); + registerFetchSubPhase(new SeqNoPrimaryTermFetchSubPhase()); registerFetchSubPhase(new MatchedQueriesFetchSubPhase()); registerFetchSubPhase(new HighlightPhase(highlighters)); registerFetchSubPhase(new ScoreFetchSubPhase()); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 4fee684276288..ddec3637ed491 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -901,6 +901,11 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc if (source.version() != null) { context.version(source.version()); } + + if (source.seqNoAndPrimaryTerm() != null) { + context.seqNoAndPrimaryTerm(source.seqNoAndPrimaryTerm()); + } + if (source.stats() != null) { context.groupStats(source.stats()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index debbacdc6196c..ba51099d6bc00 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -66,6 +67,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder> sorts = null; private HighlightBuilder highlightBuilder; @@ -85,6 +87,7 @@ protected TopHitsAggregationBuilder(TopHitsAggregationBuilder clone, this.size = clone.size; this.explain = clone.explain; this.version = clone.version; + this.seqNoAndPrimaryTerm = clone.seqNoAndPrimaryTerm; this.trackScores = clone.trackScores; this.sorts = clone.sorts == null ? null : new ArrayList<>(clone.sorts); this.highlightBuilder = clone.highlightBuilder == null ? 
null : @@ -137,6 +140,9 @@ public TopHitsAggregationBuilder(StreamInput in) throws IOException { } trackScores = in.readBoolean(); version = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + seqNoAndPrimaryTerm = in.readBoolean(); + } } @Override @@ -173,6 +179,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { } out.writeBoolean(trackScores); out.writeBoolean(version); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeBoolean(seqNoAndPrimaryTerm); + } } /** @@ -526,6 +535,23 @@ public boolean version() { return version; } + /** + * Should each {@link org.elasticsearch.search.SearchHit} be returned with the + * sequence number and primary term of the last modification of the document. + */ + public TopHitsAggregationBuilder seqNoAndPrimaryTerm(Boolean seqNoAndPrimaryTerm) { + this.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm; + return this; + } + + /** + * Indicates whether {@link org.elasticsearch.search.SearchHit}s should be returned with the + * sequence number and primary term of the last modification of the document. + */ + public Boolean seqNoAndPrimaryTerm() { + return seqNoAndPrimaryTerm; + } + /** * Applies when sorting, and controls if scores will be tracked as well. * Defaults to {@code false}. @@ -579,8 +605,9 @@ protected TopHitsAggregatorFactory doBuild(SearchContext context, AggregatorFact } else { optionalSort = SortBuilder.buildSort(sorts, context.getQueryShardContext()); } - return new TopHitsAggregatorFactory(name, from, size, explain, version, trackScores, optionalSort, highlightBuilder, - storedFieldsContext, docValueFields, fields, fetchSourceContext, context, parent, subfactoriesBuilder, metaData); + return new TopHitsAggregatorFactory(name, from, size, explain, version, seqNoAndPrimaryTerm, trackScores, optionalSort, + highlightBuilder, storedFieldsContext, docValueFields, fields, fetchSourceContext, context, parent, subfactoriesBuilder, + metaData); } @Override @@ -589,6 +616,7 @@ protected XContentBuilder internalXContent(XContentBuilder builder, Params param builder.field(SearchSourceBuilder.FROM_FIELD.getPreferredName(), from); builder.field(SearchSourceBuilder.SIZE_FIELD.getPreferredName(), size); builder.field(SearchSourceBuilder.VERSION_FIELD.getPreferredName(), version); + builder.field(SearchSourceBuilder.SEQ_NO_PRIMARY_TERM_FIELD.getPreferredName(), seqNoAndPrimaryTerm); builder.field(SearchSourceBuilder.EXPLAIN_FIELD.getPreferredName(), explain); if (fetchSourceContext != null) { builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), fetchSourceContext); @@ -646,6 +674,8 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa factory.size(parser.intValue()); } else if (SearchSourceBuilder.VERSION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { factory.version(parser.booleanValue()); + } else if (SearchSourceBuilder.SEQ_NO_PRIMARY_TERM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + factory.seqNoAndPrimaryTerm(parser.booleanValue()); } else if (SearchSourceBuilder.EXPLAIN_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { factory.explain(parser.booleanValue()); } else if (SearchSourceBuilder.TRACK_SCORES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -745,7 +775,7 @@ public static TopHitsAggregationBuilder parse(String aggregationName, XContentPa @Override protected int doHashCode() { return Objects.hash(explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder, - 
scriptFields, size, sorts, trackScores, version); + scriptFields, size, sorts, trackScores, version, seqNoAndPrimaryTerm); } @Override @@ -761,7 +791,8 @@ protected boolean doEquals(Object obj) { && Objects.equals(size, other.size) && Objects.equals(sorts, other.sorts) && Objects.equals(trackScores, other.trackScores) - && Objects.equals(version, other.version); + && Objects.equals(version, other.version) + && Objects.equals(seqNoAndPrimaryTerm, other.seqNoAndPrimaryTerm); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java index 6086942955122..7edaccb66d4bd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java @@ -44,6 +44,7 @@ class TopHitsAggregatorFactory extends AggregatorFactory sort; private final HighlightBuilder highlightBuilder; @@ -52,8 +53,8 @@ class TopHitsAggregatorFactory extends AggregatorFactory scriptFields; private final FetchSourceContext fetchSourceContext; - TopHitsAggregatorFactory(String name, int from, int size, boolean explain, boolean version, boolean trackScores, - Optional sort, HighlightBuilder highlightBuilder, StoredFieldsContext storedFieldsContext, + TopHitsAggregatorFactory(String name, int from, int size, boolean explain, boolean version, boolean seqNoAndPrimaryTerm, + boolean trackScores, Optional sort, HighlightBuilder highlightBuilder, StoredFieldsContext storedFieldsContext, List docValueFields, List scriptFields, FetchSourceContext fetchSourceContext, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactories, Map metaData) throws IOException { @@ -62,6 +63,7 @@ class TopHitsAggregatorFactory extends AggregatorFactory> sorts; private boolean trackScores = false; @@ -247,6 +250,11 @@ public SearchSourceBuilder(StreamInput in) throws IOException { timeout = in.readOptionalTimeValue(); trackScores = in.readBoolean(); version = in.readOptionalBoolean(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + seqNoAndPrimaryTerm = in.readOptionalBoolean(); + } else { + seqNoAndPrimaryTerm = null; + } extBuilders = in.readNamedWriteableList(SearchExtBuilder.class); profile = in.readBoolean(); searchAfterBuilder = in.readOptionalWriteable(SearchAfterBuilder::new); @@ -310,6 +318,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalTimeValue(timeout); out.writeBoolean(trackScores); out.writeOptionalBoolean(version); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeOptionalBoolean(seqNoAndPrimaryTerm); + } out.writeNamedWriteableList(extBuilders); out.writeBoolean(profile); out.writeOptionalWriteable(searchAfterBuilder); @@ -441,6 +452,23 @@ public Boolean version() { return version; } + /** + * Should each {@link org.elasticsearch.search.SearchHit} be returned with the + * sequence number and primary term of the last modification of the document. + */ + public SearchSourceBuilder seqNoAndPrimaryTerm(Boolean seqNoAndPrimaryTerm) { + this.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm; + return this; + } + + /** + * Indicates whether {@link org.elasticsearch.search.SearchHit}s should be returned with the + * sequence number and primary term of the last modification of the document. 
+ */ + public Boolean seqNoAndPrimaryTerm() { + return seqNoAndPrimaryTerm; + } + /** * An optional timeout to control how long search is allowed to take. */ @@ -999,6 +1027,7 @@ private SearchSourceBuilder shallowCopy(QueryBuilder queryBuilder, QueryBuilder rewrittenBuilder.trackScores = trackScores; rewrittenBuilder.trackTotalHitsUpTo = trackTotalHitsUpTo; rewrittenBuilder.version = version; + rewrittenBuilder.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm; rewrittenBuilder.collapse = collapse; return rewrittenBuilder; } @@ -1038,6 +1067,8 @@ public void parseXContent(XContentParser parser, boolean checkTrailingTokens) th minScore = parser.floatValue(); } else if (VERSION_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { version = parser.booleanValue(); + } else if (SEQ_NO_PRIMARY_TERM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + seqNoAndPrimaryTerm = parser.booleanValue(); } else if (EXPLAIN_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { explain = parser.booleanValue(); } else if (TRACK_SCORES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -1205,6 +1236,10 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t builder.field(VERSION_FIELD.getPreferredName(), version); } + if (seqNoAndPrimaryTerm != null) { + builder.field(SEQ_NO_PRIMARY_TERM_FIELD.getPreferredName(), seqNoAndPrimaryTerm); + } + if (explain != null) { builder.field(EXPLAIN_FIELD.getPreferredName(), explain); } @@ -1523,7 +1558,7 @@ public int hashCode() { return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder, indexBoosts, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, - profile, extBuilders, collapse, trackTotalHitsUpTo); + seqNoAndPrimaryTerm, profile, extBuilders, collapse, trackTotalHitsUpTo); } @Override @@ -1558,6 +1593,7 @@ public boolean equals(Object obj) { && Objects.equals(timeout, other.timeout) && Objects.equals(trackScores, other.trackScores) && Objects.equals(version, other.version) + && Objects.equals(seqNoAndPrimaryTerm, other.seqNoAndPrimaryTerm) && Objects.equals(profile, other.profile) && Objects.equals(extBuilders, other.extBuilders) && Objects.equals(collapse, other.collapse) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermFetchSubPhase.java new file mode 100644 index 0000000000000..31a6328ff9574 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/SeqNoPrimaryTermFetchSubPhase.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.fetch.subphase; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.ReaderUtil; +import org.elasticsearch.index.mapper.SeqNoFieldMapper; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Comparator; + +public final class SeqNoPrimaryTermFetchSubPhase implements FetchSubPhase { + @Override + public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { + if (context.seqNoAndPrimaryTerm() == false) { + return; + } + + hits = hits.clone(); // don't modify the incoming hits + Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId)); + + int lastReaderId = -1; + NumericDocValues seqNoField = null; + NumericDocValues primaryTermField = null; + for (SearchHit hit : hits) { + int readerId = ReaderUtil.subIndex(hit.docId(), context.searcher().getIndexReader().leaves()); + LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerId); + if (lastReaderId != readerId) { + seqNoField = subReaderContext.reader().getNumericDocValues(SeqNoFieldMapper.NAME); + primaryTermField = subReaderContext.reader().getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME); + lastReaderId = readerId; + } + int docId = hit.docId() - subReaderContext.docBase; + long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + long primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM; + // we have to check the primary term field as it is only assigned for non-nested documents + if (primaryTermField != null && primaryTermField.advanceExact(docId)) { + boolean found = seqNoField.advanceExact(docId); + assert found: "found seq no for " + docId + " but not a primary term"; + seqNo = seqNoField.longValue(); + primaryTerm = primaryTermField.longValue(); + } + hit.setSeqNo(seqNo); + hit.setPrimaryTerm(primaryTerm); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 3a7fb9f823f3a..ecde28719cec6 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -422,6 +422,16 @@ public void version(boolean version) { in.version(version); } + @Override + public boolean seqNoAndPrimaryTerm() { + return in.seqNoAndPrimaryTerm(); + } + + @Override + public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) { + in.seqNoAndPrimaryTerm(seqNoAndPrimaryTerm); + } + @Override public int[] docIdsToLoad() { return in.docIdsToLoad(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 768143dd8fb0b..bd6d9c501c8d1 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ObjectMapper; -import 
org.elasticsearch.search.collapse.CollapseContext; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -46,6 +45,7 @@ import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.SearchContextAggregations; +import org.elasticsearch.search.collapse.CollapseContext; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; @@ -309,6 +309,12 @@ public InnerHitsContext innerHits() { public abstract void version(boolean version); + /** indicates whether the sequence number and primary term of the last modification to each hit should be returned */ + public abstract boolean seqNoAndPrimaryTerm(); + + /** controls whether the sequence number and primary term of the last modification to each hit should be returned */ + public abstract void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm); + public abstract int[] docIdsToLoad(); public abstract int docIdsToLoadFrom(); diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index 8c8137f5e4345..fb4d233f10ee8 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -65,6 +65,7 @@ public class SubSearchContext extends FilteredSearchContext { private boolean explain; private boolean trackScores; private boolean version; + private boolean seqNoAndPrimaryTerm; public SubSearchContext(SearchContext context) { super(context); @@ -294,6 +295,16 @@ public void version(boolean version) { this.version = version; } + @Override + public boolean seqNoAndPrimaryTerm() { + return seqNoAndPrimaryTerm; + } + + @Override + public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) { + this.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm; + } + @Override public int[] docIdsToLoad() { return docIdsToLoad; diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 9378a2cdd86bb..328950e4f3569 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -241,6 +241,7 @@ public void run() throws IOException { public void testExpandRequestOptions() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); boolean version = randomBoolean(); + final boolean seqNoAndTerm = randomBoolean(); mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) { @Override @@ -249,13 +250,14 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL assertTrue(request.requests().stream().allMatch((r) -> "foo".equals(r.preference()))); assertTrue(request.requests().stream().allMatch((r) -> "baz".equals(r.routing()))); assertTrue(request.requests().stream().allMatch((r) -> version == r.source().version())); + assertTrue(request.requests().stream().allMatch((r) -> seqNoAndTerm == r.source().seqNoAndPrimaryTerm())); assertTrue(request.requests().stream().allMatch((r) -> postFilter.equals(r.source().postFilter()))); } }; 
mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder() .collapse( new CollapseBuilder("someField") - .setInnerHits(new InnerHitBuilder().setName("foobarbaz").setVersion(version)) + .setInnerHits(new InnerHitBuilder().setName("foobarbaz").setVersion(version).setSeqNoAndPrimaryTerm(seqNoAndTerm)) ) .postFilter(QueryBuilders.existsQuery("foo"))) .preference("foobar") diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 5c76c77b5c888..257ee807419b6 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -140,6 +140,11 @@ public void testEqualsAndHashcode() { } } + public static InnerHitBuilder randomNestedInnerHits() { + InnerHitBuilder innerHitBuilder = randomInnerHits(); + innerHitBuilder.setSeqNoAndPrimaryTerm(false); // not supported by nested queries + return innerHitBuilder; + } public static InnerHitBuilder randomInnerHits() { InnerHitBuilder innerHits = new InnerHitBuilder(); innerHits.setName(randomAlphaOfLengthBetween(1, 16)); @@ -147,6 +152,7 @@ public static InnerHitBuilder randomInnerHits() { innerHits.setSize(randomIntBetween(0, 32)); innerHits.setExplain(randomBoolean()); innerHits.setVersion(randomBoolean()); + innerHits.setSeqNoAndPrimaryTerm(randomBoolean()); innerHits.setTrackScores(randomBoolean()); if (randomBoolean()) { innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAlphaOfLengthBetween(1, 16))); @@ -189,6 +195,7 @@ static InnerHitBuilder mutate(InnerHitBuilder original) throws IOException { modifiers.add(() -> copy.setSize(randomValueOtherThan(copy.getSize(), () -> randomIntBetween(0, 128)))); modifiers.add(() -> copy.setExplain(!copy.isExplain())); modifiers.add(() -> copy.setVersion(!copy.isVersion())); + modifiers.add(() -> copy.setSeqNoAndPrimaryTerm(!copy.isSeqNoAndPrimaryTerm())); modifiers.add(() -> copy.setTrackScores(!copy.isTrackScores())); modifiers.add(() -> copy.setName(randomValueOtherThan(copy.getName(), () -> randomAlphaOfLengthBetween(1, 16)))); modifiers.add(() -> { diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index 76479791283b4..ac9ae8d0fa7fb 100644 --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -45,7 +45,7 @@ import java.util.Map; import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta; -import static org.elasticsearch.index.query.InnerHitBuilderTests.randomInnerHits; +import static org.elasticsearch.index.query.InnerHitBuilderTests.randomNestedInnerHits; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; @@ -267,7 +267,7 @@ public void testThatUnrecognizedFromStringThrowsException() { } public void testInlineLeafInnerHitsNestedQuery() throws Exception { - InnerHitBuilder leafInnerHits = randomInnerHits(); + InnerHitBuilder leafInnerHits = randomNestedInnerHits(); NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None); nestedQueryBuilder.innerHit(leafInnerHits); Map innerHitBuilders = new HashMap<>(); @@ -276,7 +276,7 @@ public void 
testInlineLeafInnerHitsNestedQuery() throws Exception { } public void testInlineLeafInnerHitsNestedQueryViaBoolQuery() { - InnerHitBuilder leafInnerHits = randomInnerHits(); + InnerHitBuilder leafInnerHits = randomNestedInnerHits(); NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None) .innerHit(leafInnerHits); BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder().should(nestedQueryBuilder); @@ -286,7 +286,7 @@ public void testInlineLeafInnerHitsNestedQueryViaBoolQuery() { } public void testInlineLeafInnerHitsNestedQueryViaConstantScoreQuery() { - InnerHitBuilder leafInnerHits = randomInnerHits(); + InnerHitBuilder leafInnerHits = randomNestedInnerHits(); NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None) .innerHit(leafInnerHits); ConstantScoreQueryBuilder constantScoreQueryBuilder = new ConstantScoreQueryBuilder(nestedQueryBuilder); @@ -296,10 +296,10 @@ public void testInlineLeafInnerHitsNestedQueryViaConstantScoreQuery() { } public void testInlineLeafInnerHitsNestedQueryViaBoostingQuery() { - InnerHitBuilder leafInnerHits1 = randomInnerHits(); + InnerHitBuilder leafInnerHits1 = randomNestedInnerHits(); NestedQueryBuilder nestedQueryBuilder1 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None) .innerHit(leafInnerHits1); - InnerHitBuilder leafInnerHits2 = randomInnerHits(); + InnerHitBuilder leafInnerHits2 = randomNestedInnerHits(); NestedQueryBuilder nestedQueryBuilder2 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None) .innerHit(leafInnerHits2); BoostingQueryBuilder constantScoreQueryBuilder = new BoostingQueryBuilder(nestedQueryBuilder1, nestedQueryBuilder2); @@ -310,7 +310,7 @@ public void testInlineLeafInnerHitsNestedQueryViaBoostingQuery() { } public void testInlineLeafInnerHitsNestedQueryViaFunctionScoreQuery() { - InnerHitBuilder leafInnerHits = randomInnerHits(); + InnerHitBuilder leafInnerHits = randomNestedInnerHits(); NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None) .innerHit(leafInnerHits); FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(nestedQueryBuilder); @@ -330,7 +330,7 @@ public void testBuildIgnoreUnmappedNestQuery() throws Exception { when(mapperService.getIndexSettings()).thenReturn(settings); when(searchContext.mapperService()).thenReturn(mapperService); - InnerHitBuilder leafInnerHits = randomInnerHits(); + InnerHitBuilder leafInnerHits = randomNestedInnerHits(); NestedQueryBuilder query1 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None); query1.innerHit(leafInnerHits); final Map innerHitBuilders = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index fee55f1e22f23..4831729201183 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -90,6 +90,11 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp if (randomBoolean()) { hit.version(randomLong()); } + + if (randomBoolean()) { + hit.setSeqNo(randomNonNegativeLong()); + hit.setPrimaryTerm(randomLongBetween(1, Long.MAX_VALUE)); + } if (randomBoolean()) { hit.sortValues(SearchSortValuesTests.createTestItem(xContentType, transportSerialization)); } diff --git
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index ce8d9c2da834f..4a5982b9dacfa 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptPlugin; @@ -83,6 +84,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -578,6 +580,7 @@ public void testFieldCollapsing() throws Exception { } public void testFetchFeatures() { + final boolean seqNoAndTerm = randomBoolean(); SearchResponse response = client().prepareSearch("idx") .setQuery(matchQuery("text", "text").queryName("test")) .addAggregation(terms("terms") @@ -593,6 +596,7 @@ public void testFetchFeatures() { new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) .fetchSource("text", null) .version(true) + .seqNoAndPrimaryTerm(seqNoAndTerm) ) ) .get(); @@ -620,6 +624,14 @@ public void testFetchFeatures() { long version = hit.getVersion(); assertThat(version, equalTo(1L)); + if (seqNoAndTerm) { + assertThat(hit.getSeqNo(), greaterThanOrEqualTo(0L)); + assertThat(hit.getPrimaryTerm(), greaterThanOrEqualTo(1L)); + } else { + assertThat(hit.getSeqNo(), equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO)); + assertThat(hit.getPrimaryTerm(), equalTo(SequenceNumbers.UNASSIGNED_PRIMARY_TERM)); + } + assertThat(hit.getMatchedQueries()[0], equalTo("test")); DocumentField field = hit.field("field1"); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java index caf1d4ef4ee47..6ec1ea1cad301 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java @@ -54,6 +54,9 @@ protected final TopHitsAggregationBuilder createTestAggregatorBuilder() { if (randomBoolean()) { factory.version(randomBoolean()); } + if (randomBoolean()) { + factory.seqNoAndPrimaryTerm(randomBoolean()); + } if (randomBoolean()) { factory.trackScores(randomBoolean()); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index 5d96cd37b054d..6ec2732aaf915 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -145,6 +145,9 @@ public static SearchSourceBuilder randomSearchSourceBuilder( if (randomBoolean()) { builder.version(randomBoolean()); } + if (randomBoolean()) { + builder.seqNoAndPrimaryTerm(randomBoolean()); + } if (randomBoolean()) { 
builder.trackScores(randomBoolean()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 18edb5ec3790a..6a17f65790368 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -504,6 +504,16 @@ public boolean version() { public void version(boolean version) { } + @Override + public boolean seqNoAndPrimaryTerm() { + return false; + } + + @Override + public void seqNoAndPrimaryTerm(boolean seqNoAndPrimaryTerm) { + + } + @Override public int[] docIdsToLoad() { return new int[0]; From 95a6951f78524cf3ff64df5677f8a840b21fb141 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 23 Jan 2019 09:46:28 +0100 Subject: [PATCH 11/24] Use new bulk API endpoint in the docs (#37698) This change switches to using the typeless bulk API endpoint in the documentation snippets where possible --- .../bucket/adjacency-matrix-aggregation.asciidoc | 2 +- .../aggregations/bucket/filters-aggregation.asciidoc | 2 +- .../metrics/scripted-metric-aggregation.asciidoc | 2 +- docs/reference/getting-started.asciidoc | 6 +++--- docs/reference/search/validate.asciidoc | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/reference/aggregations/bucket/adjacency-matrix-aggregation.asciidoc b/docs/reference/aggregations/bucket/adjacency-matrix-aggregation.asciidoc index 1806f05d2c686..5cd67877460bf 100644 --- a/docs/reference/aggregations/bucket/adjacency-matrix-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/adjacency-matrix-aggregation.asciidoc @@ -30,7 +30,7 @@ Example: [source,js] -------------------------------------------------- -PUT /emails/_doc/_bulk?refresh +PUT /emails/_bulk?refresh { "index" : { "_id" : 1 } } { "accounts" : ["hillary", "sidney"]} { "index" : { "_id" : 2 } } diff --git a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc index b7e3b1edf10d2..94b91654f0c7f 100644 --- a/docs/reference/aggregations/bucket/filters-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/filters-aggregation.asciidoc @@ -9,7 +9,7 @@ Example: [source,js] -------------------------------------------------- -PUT /logs/_doc/_bulk?refresh +PUT /logs/_bulk?refresh { "index" : { "_id" : 1 } } { "body" : "warning: page could not be rendered" } { "index" : { "_id" : 2 } } diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 69c9d50690196..1f2ec113d31fd 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -147,7 +147,7 @@ Imagine a situation where you index the following documents into an index with 2 [source,js] -------------------------------------------------- -PUT /transactions/_doc/_bulk?refresh +PUT /transactions/_bulk?refresh {"index":{"_id":1}} {"type": "sale","amount": 80} {"index":{"_id":2}} diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index b79dd5c36c244..d32eeaff8c719 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -633,7 +633,7 @@ As a quick example, the following call indexes two documents (ID 1 - John Doe an [source,js] 
-------------------------------------------------- -POST /customer/_doc/_bulk?pretty +POST /customer/_bulk?pretty {"index":{"_id":"1"}} {"name": "John Doe" } {"index":{"_id":"2"}} {"name": "Jane Doe" } -------------------------------------------------- @@ -645,7 +645,7 @@ This example updates the first document (ID of 1) and then deletes the second do [source,sh] -------------------------------------------------- -POST /customer/_doc/_bulk?pretty +POST /customer/_bulk?pretty {"update":{"_id":"1"}} {"doc": { "name": "John Doe becomes Jane Doe" } } {"delete":{"_id":"2"}} @@ -692,7 +692,7 @@ You can download the sample dataset (accounts.json) from https://github.com/elas [source,sh] -------------------------------------------------- -curl -H "Content-Type: application/json" -XPOST "localhost:9200/bank/_doc/_bulk?pretty&refresh" --data-binary "@accounts.json" +curl -H "Content-Type: application/json" -XPOST "localhost:9200/bank/_bulk?pretty&refresh" --data-binary "@accounts.json" curl "localhost:9200/_cat/indices?v" -------------------------------------------------- // NOTCONSOLE diff --git a/docs/reference/search/validate.asciidoc b/docs/reference/search/validate.asciidoc index c17f1393e1520..47eab847336ce 100644 --- a/docs/reference/search/validate.asciidoc +++ b/docs/reference/search/validate.asciidoc @@ -6,7 +6,7 @@ without executing it. We'll use the following test data to explain _validate: [source,js] -------------------------------------------------- -PUT twitter/_doc/_bulk?refresh +PUT twitter/_bulk?refresh {"index":{"_id":1}} {"user" : "kimchy", "post_date" : "2009-11-15T14:12:12", "message" : "trying out Elasticsearch"} {"index":{"_id":2}} From 6926a73d618be8bb8329e4f98eadcdf1fb4bb17d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 23 Jan 2019 09:46:49 +0100 Subject: [PATCH 12/24] Fix edge case in PutMappingRequestTests (#37665) The recently introduced client side PutMappingRequestTests has an edge case that leads to failing tests. When the "source" of the original request is `null` it gets rendered to xContent as an empty object, which by the test class itself is parsed back as an empty map. For this reason the original and the parsed request differ (one has an empty source, one an empty map). This fixes this edge case by assuming an empty map means a null source in the request. In practice the distinction doesn't matter because all the client side request does is write itself to xContent, which gives the same result regardless of whether `source` is null or empty.
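For reviewers, a minimal self-contained sketch of the failing round trip; the class and method names here are stand-ins of ours, not the real client code, and only the null-versus-empty distinction is the point:

[source,java]
--------------------------------------------------
import java.util.Collections;
import java.util.Map;

// Toy model of the edge case: a null source still renders as "{}", and "{}"
// parses back into an empty map, so the original and parsed requests differ
// even though their JSON output is identical.
public class NullSourceRoundTrip {

    // Stand-in for writing the request body to xContent.
    static String render(Map<String, Object> source) {
        return source == null ? "{}" : source.toString();
    }

    public static void main(String[] args) {
        Map<String, Object> original = null;                 // request created without a source
        Map<String, Object> parsed = Collections.emptyMap(); // what parsing "{}" yields
        System.out.println(render(original).equals(render(parsed))); // true: same JSON
        System.out.println(parsed.equals(original));                 // false: different state
    }
}
--------------------------------------------------

Treating an empty parsed map as "no source", as the change below does, makes both sides of the round trip compare equal again.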
Closes #37654 --- .../indices/PutMappingRequestTests.java | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutMappingRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutMappingRequestTests.java index aff64533ece0f..50224aa1b9ad7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutMappingRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/PutMappingRequestTests.java @@ -19,15 +19,14 @@ package org.elasticsearch.client.indices; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; +import java.util.Map; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37654") public class PutMappingRequestTests extends AbstractXContentTestCase<PutMappingRequest> { @Override @@ -47,7 +46,10 @@ protected PutMappingRequest createTestInstance() { @Override protected PutMappingRequest doParseInstance(XContentParser parser) throws IOException { PutMappingRequest request = new PutMappingRequest(); - request.source(parser.map()); + Map<String, Object> map = parser.map(); + if (map.isEmpty() == false) { + request.source(map); + } return request; } @@ -58,11 +60,16 @@ protected boolean supportsUnknownFields() { @Override protected void assertEqualInstances(PutMappingRequest expected, PutMappingRequest actual) { - try (XContentParser expectedJson = createParser(expected.xContentType().xContent(), expected.source()); - XContentParser actualJson = createParser(actual.xContentType().xContent(), actual.source())) { - assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); - } catch (IOException e) { - throw new RuntimeException(e); + if (actual.source() != null) { + try (XContentParser expectedJson = createParser(expected.xContentType().xContent(), expected.source()); + XContentParser actualJson = createParser(actual.xContentType().xContent(), actual.source())) { + assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + } catch (IOException e) { + throw new RuntimeException(e); + } + } else { + // if the original `source` is null, the parsed source should be so too + assertNull(expected.source()); } } } From b3f9becf5f99e718b751af81bb31ae1c0779fdcc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 23 Jan 2019 09:48:00 +0100 Subject: [PATCH 13/24] Modify removal_of_types.asciidoc (#37648) After switching the default behaviour of "include_type_name" to "false" in 7.0, some parts of the types removal documentation can be adapted as well.
--- .../mapping/removal_of_types.asciidoc | 30 +++++++++++-------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/docs/reference/mapping/removal_of_types.asciidoc b/docs/reference/mapping/removal_of_types.asciidoc index 251025c1ba24c..ee5ee4b4fe664 100644 --- a/docs/reference/mapping/removal_of_types.asciidoc +++ b/docs/reference/mapping/removal_of_types.asciidoc @@ -112,7 +112,7 @@ have looked something like this: [source,js] ---- -PUT twitter?include_type_name=true +PUT twitter { "mappings": { "user": { @@ -157,16 +157,16 @@ GET twitter/tweet/_search ---- // NOTCONSOLE -You could achieve the same thing by adding a custom `type` field as follows: +You can achieve the same thing by adding a custom `type` field as follows: [source,js] ---- -PUT twitter?include_type_name=true +PUT twitter?include_type_name=true <1> { "mappings": { "_doc": { "properties": { - "type": { "type": "keyword" }, <1> + "type": { "type": "keyword" }, <2> "name": { "type": "text" }, "user_name": { "type": "keyword" }, "email": { "type": "keyword" }, @@ -204,7 +204,7 @@ GET twitter/_search }, "filter": { "match": { - "type": "tweet" <1> + "type": "tweet" <2> } } } @@ -212,7 +212,9 @@ GET twitter/_search } ---- // NOTCONSOLE -<1> The explicit `type` field takes the place of the implicit `_type` field. +<1> Use `include_type_name=true` if you need to use the "old" syntax, including the "_doc" object, as +in this example. +<2> The explicit `type` field takes the place of the implicit `_type` field. [float] ==== Parent/Child without mapping types @@ -299,7 +301,7 @@ This first example splits our `twitter` index into a `tweets` index and a [source,js] ---- -PUT users?include_type_name=true +PUT users { "settings": { "index.mapping.single_type": true @@ -321,7 +323,7 @@ PUT users?include_type_name=true } } -PUT tweets?include_type_name=true +PUT tweets { "settings": { "index.mapping.single_type": true @@ -376,7 +378,7 @@ documents of different types which have conflicting IDs: [source,js] ---- -PUT new_twitter?include_type_name=true +PUT new_twitter { "mappings": { "_doc": { @@ -427,10 +429,12 @@ POST _reindex [float] === Use `include_type_name=false` to prepare for upgrade to 8.0 Index creation, mappings and document APIs support the `include_type_name` -option. When set to `false`, this option enables the behavior that will become -default in 8.0 when types are removed. See some examples of interactions with -Elasticsearch with this option turned off: +Index creation and mapping APIs support a new `include_type_name` URL parameter +starting with version 6.7. It will default to `true` in version 6.7, default to +`false` in version 7.0 and will be removed in version 8.0. When set to `true`, +this parameter enables the pre-7.0 behavior of using type names in the API. + +See some examples of interactions with Elasticsearch with this option turned off: [float] ==== Index creation From 7b3dd3022da261a6f299567091034548a80a5ebd Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 23 Jan 2019 09:37:37 +0000 Subject: [PATCH 14/24] [ML] Update ML results mappings on process start (#37706) This change moves the update to the results index mappings from the open job action to the code that starts the autodetect process. When a rolling upgrade is performed we need to update the mappings for already-open jobs that are reassigned from an old version node to a new version node, but the open job action is not called in this case.
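As context, a hedged sketch of the `_meta.version` convention the mapping check relies on (`requiresUpdate` and `mappingSource` are illustrative names for this example, not the actual helper):

    import java.util.Map;
    import org.elasticsearch.Version;

    public class MappingVersionCheck {
        // ML results mappings record the version that wrote them, e.g.
        // { "_meta": { "version": "6.6.0" }, "properties": { ... } }
        @SuppressWarnings("unchecked")
        static boolean requiresUpdate(Map<String, Object> mappingSource, Version minVersion) {
            Map<String, Object> meta = (Map<String, Object>) mappingSource.get("_meta");
            if (meta == null || meta.get("version") == null) {
                return true; // no recorded version, so recreate the mappings
            }
            // outdated if written by a version before the required minimum
            return Version.fromString((String) meta.get("version")).before(minVersion);
        }
    }

The real helper in the diff below additionally catches parse failures on the recorded version and treats the index as needing an update.
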
Closes #37607 --- .../persistence/ElasticsearchMappings.java | 114 +++++++++++++++++ .../ElasticsearchMappingsTests.java | 100 +++++++++++++++ .../ml/action/TransportOpenJobAction.java | 121 +----------------- .../autodetect/AutodetectProcessManager.java | 18 ++- .../action/TransportOpenJobActionTests.java | 93 -------------- .../AutodetectProcessManagerTests.java | 1 + .../upgrades/MlMappingsUpgradeIT.java | 101 +++++++++++++++ 7 files changed, 336 insertions(+), 212 deletions(-) create mode 100644 x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index fb0db771fa581..0eb2e666916dc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -5,8 +5,24 @@ */ package org.elasticsearch.xpack.core.ml.job.persistence; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.AliasOrIndex; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.CheckedSupplier; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.Index; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DelayedDataCheckConfig; @@ -38,10 +54,16 @@ import org.elasticsearch.xpack.core.ml.notifications.AuditMessage; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; /** * Static methods to create Elasticsearch index mappings for the autodetect @@ -107,6 +129,8 @@ public class ElasticsearchMappings { static final String RAW = "raw"; + private static final Logger logger = LogManager.getLogger(ElasticsearchMappings.class); + private ElasticsearchMappings() { } @@ -964,4 +988,94 @@ public static XContentBuilder auditMessageMapping() throws IOException { .endObject() .endObject(); } + + static String[] mappingRequiresUpdate(ClusterState state, String[] concreteIndices, Version minVersion) throws IOException { + List indicesToUpdate = new ArrayList<>(); + + ImmutableOpenMap> currentMapping = state.metaData().findMappings(concreteIndices, + new String[] {DOC_TYPE}, 
MapperPlugin.NOOP_FIELD_FILTER); + + for (String index : concreteIndices) { + ImmutableOpenMap innerMap = currentMapping.get(index); + if (innerMap != null) { + MappingMetaData metaData = innerMap.get(DOC_TYPE); + try { + @SuppressWarnings("unchecked") + Map meta = (Map) metaData.sourceAsMap().get("_meta"); + if (meta != null) { + String versionString = (String) meta.get("version"); + if (versionString == null) { + logger.info("Version of mappings for [{}] not found, recreating", index); + indicesToUpdate.add(index); + continue; + } + + Version mappingVersion = Version.fromString(versionString); + + if (mappingVersion.onOrAfter(minVersion)) { + continue; + } else { + logger.info("Mappings for [{}] are outdated [{}], updating it[{}].", index, mappingVersion, Version.CURRENT); + indicesToUpdate.add(index); + continue; + } + } else { + logger.info("Version of mappings for [{}] not found, recreating", index); + indicesToUpdate.add(index); + continue; + } + } catch (Exception e) { + logger.error(new ParameterizedMessage("Failed to retrieve mapping version for [{}], recreating", index), e); + indicesToUpdate.add(index); + continue; + } + } else { + logger.info("No mappings found for [{}], recreating", index); + indicesToUpdate.add(index); + } + } + return indicesToUpdate.toArray(new String[indicesToUpdate.size()]); + } + + public static void addDocMappingIfMissing(String alias, CheckedSupplier mappingSupplier, + Client client, ClusterState state, ActionListener listener) { + AliasOrIndex aliasOrIndex = state.metaData().getAliasAndIndexLookup().get(alias); + if (aliasOrIndex == null) { + // The index has never been created yet + listener.onResponse(true); + return; + } + String[] concreteIndices = aliasOrIndex.getIndices().stream().map(IndexMetaData::getIndex).map(Index::getName) + .toArray(String[]::new); + + String[] indicesThatRequireAnUpdate; + try { + indicesThatRequireAnUpdate = mappingRequiresUpdate(state, concreteIndices, Version.CURRENT); + } catch (IOException e) { + listener.onFailure(e); + return; + } + + if (indicesThatRequireAnUpdate.length > 0) { + try (XContentBuilder mapping = mappingSupplier.get()) { + PutMappingRequest putMappingRequest = new PutMappingRequest(indicesThatRequireAnUpdate); + putMappingRequest.type(DOC_TYPE); + putMappingRequest.source(mapping); + executeAsyncWithOrigin(client, ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, + ActionListener.wrap(response -> { + if (response.isAcknowledged()) { + listener.onResponse(true); + } else { + listener.onFailure(new ElasticsearchException("Attempt to put missing mapping in indices " + + Arrays.toString(indicesThatRequireAnUpdate) + " was not acknowledged")); + } + }, listener::onFailure)); + } catch (IOException e) { + listener.onFailure(e); + } + } else { + logger.trace("Mappings are up to date."); + listener.onResponse(true); + } + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java index e4ce536a3ccf6..e87515afadd1d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java @@ -9,10 +9,18 @@ import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; 
+import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; @@ -30,6 +38,8 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -128,6 +138,96 @@ public void testTermFieldMapping() throws IOException { assertNull(instanceMapping); } + + public void testMappingRequiresUpdateNoMapping() throws IOException { + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + ClusterState cs = csBuilder.build(); + String[] indices = new String[] { "no_index" }; + + assertArrayEquals(new String[] { "no_index" }, ElasticsearchMappings.mappingRequiresUpdate(cs, indices, Version.CURRENT)); + } + + public void testMappingRequiresUpdateNullMapping() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("null_mapping", null)); + String[] indices = new String[] { "null_index" }; + assertArrayEquals(indices, ElasticsearchMappings.mappingRequiresUpdate(cs, indices, Version.CURRENT)); + } + + public void testMappingRequiresUpdateNoVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("no_version_field", "NO_VERSION_FIELD")); + String[] indices = new String[] { "no_version_field" }; + assertArrayEquals(indices, ElasticsearchMappings.mappingRequiresUpdate(cs, indices, Version.CURRENT)); + } + + public void testMappingRequiresUpdateRecentMappingVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_current", Version.CURRENT.toString())); + String[] indices = new String[] { "version_current" }; + assertArrayEquals(new String[] {}, ElasticsearchMappings.mappingRequiresUpdate(cs, indices, Version.CURRENT)); + } + + public void testMappingRequiresUpdateMaliciousMappingVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData( + Collections.singletonMap("version_current", Collections.singletonMap("nested", "1.0"))); + String[] indices = new String[] { "version_nested" }; + assertArrayEquals(indices, ElasticsearchMappings.mappingRequiresUpdate(cs, indices, Version.CURRENT)); + } + + public void testMappingRequiresUpdateBogusMappingVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_bogus", "0.0")); + String[] indices = new String[] { "version_bogus" }; + assertArrayEquals(indices, ElasticsearchMappings.mappingRequiresUpdate(cs, indices, Version.CURRENT)); + } + + public void testMappingRequiresUpdateNewerMappingVersion() throws IOException { + ClusterState cs = 
getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_newer", Version.CURRENT)); + String[] indices = new String[] { "version_newer" }; + assertArrayEquals(new String[] {}, ElasticsearchMappings.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousVersion())); + } + + public void testMappingRequiresUpdateNewerMappingVersionMinor() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_newer_minor", Version.CURRENT)); + String[] indices = new String[] { "version_newer_minor" }; + assertArrayEquals(new String[] {}, + ElasticsearchMappings.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousMinorVersion())); + } + + + private ClusterState getClusterStateWithMappingsWithMetaData(Map namesAndVersions) throws IOException { + MetaData.Builder metaDataBuilder = MetaData.builder(); + + for (Map.Entry entry : namesAndVersions.entrySet()) { + + String indexName = entry.getKey(); + Object version = entry.getValue(); + + IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); + indexMetaData.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); + + Map mapping = new HashMap<>(); + Map properties = new HashMap<>(); + for (int i = 0; i < 10; i++) { + properties.put("field" + i, Collections.singletonMap("type", "string")); + } + mapping.put("properties", properties); + + Map meta = new HashMap<>(); + if (version != null && version.equals("NO_VERSION_FIELD") == false) { + meta.put("version", version); + } + mapping.put("_meta", meta); + + indexMetaData.putMapping(new MappingMetaData(ElasticsearchMappings.DOC_TYPE, mapping)); + + metaDataBuilder.put(indexMetaData); + } + MetaData metaData = metaDataBuilder.build(); + + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metaData(metaData); + return csBuilder.build(); + } + private Set collectResultsDocFieldNames() throws IOException { // Only the mappings for the results index should be added below. Do NOT add mappings for other indexes here. 
return collectFieldNames(ElasticsearchMappings.resultsMapping()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 2da89c359e793..a5aed9b5b5957 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -7,14 +7,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -23,21 +19,14 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.AliasOrIndex; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.Index; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.AllocatedPersistentTask; @@ -45,7 +34,6 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.persistent.PersistentTasksService; -import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; @@ -69,9 +57,7 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; -import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.LinkedList; import java.util.List; @@ -405,54 +391,6 @@ private static boolean jobHasRules(Job job) { return job.getAnalysisConfig().getDetectors().stream().anyMatch(d -> d.getRules().isEmpty() == false); } - static String[] mappingRequiresUpdate(ClusterState state, String[] concreteIndices, Version minVersion, - Logger logger) throws IOException { - List indicesToUpdate = new ArrayList<>(); - - ImmutableOpenMap> currentMapping = 
state.metaData().findMappings(concreteIndices, - new String[] { ElasticsearchMappings.DOC_TYPE }, MapperPlugin.NOOP_FIELD_FILTER); - - for (String index : concreteIndices) { - ImmutableOpenMap innerMap = currentMapping.get(index); - if (innerMap != null) { - MappingMetaData metaData = innerMap.get(ElasticsearchMappings.DOC_TYPE); - try { - Map meta = (Map) metaData.sourceAsMap().get("_meta"); - if (meta != null) { - String versionString = (String) meta.get("version"); - if (versionString == null) { - logger.info("Version of mappings for [{}] not found, recreating", index); - indicesToUpdate.add(index); - continue; - } - - Version mappingVersion = Version.fromString(versionString); - - if (mappingVersion.onOrAfter(minVersion)) { - continue; - } else { - logger.info("Mappings for [{}] are outdated [{}], updating it[{}].", index, mappingVersion, Version.CURRENT); - indicesToUpdate.add(index); - continue; - } - } else { - logger.info("Version of mappings for [{}] not found, recreating", index); - indicesToUpdate.add(index); - continue; - } - } catch (Exception e) { - logger.error(new ParameterizedMessage("Failed to retrieve mapping version for [{}], recreating", index), e); - indicesToUpdate.add(index); - continue; - } - } else { - logger.info("No mappings found for [{}], recreating", index); - indicesToUpdate.add(index); - } - } - return indicesToUpdate.toArray(new String[indicesToUpdate.size()]); - } - @Override protected String executor() { // This api doesn't do heavy or blocking operations (just delegates PersistentTasksService), @@ -527,25 +465,18 @@ public void onFailure(Exception e) { ); // Try adding state doc mapping - ActionListener resultsPutMappingHandler = ActionListener.wrap( + ActionListener getJobHandler = ActionListener.wrap( response -> { - addDocMappingIfMissing(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), ElasticsearchMappings::stateMapping, - state, jobUpdateListener); + ElasticsearchMappings.addDocMappingIfMissing(AnomalyDetectorsIndex.jobStateIndexWriteAlias(), + ElasticsearchMappings::stateMapping, client, state, jobUpdateListener); }, listener::onFailure ); // Get the job config jobConfigProvider.getJob(jobParams.getJobId(), ActionListener.wrap( builder -> { - try { - jobParams.setJob(builder.build()); - - // Try adding results doc mapping - addDocMappingIfMissing(AnomalyDetectorsIndex.jobResultsAliasedName(jobParams.getJobId()), - ElasticsearchMappings::resultsMapping, state, resultsPutMappingHandler); - } catch (Exception e) { - listener.onFailure(e); - } + jobParams.setJob(builder.build()); + getJobHandler.onResponse(null); }, listener::onFailure )); @@ -620,48 +551,6 @@ public void onFailure(Exception e) { ); } - private void addDocMappingIfMissing(String alias, CheckedSupplier mappingSupplier, ClusterState state, - ActionListener listener) { - AliasOrIndex aliasOrIndex = state.metaData().getAliasAndIndexLookup().get(alias); - if (aliasOrIndex == null) { - // The index has never been created yet - listener.onResponse(true); - return; - } - String[] concreteIndices = aliasOrIndex.getIndices().stream().map(IndexMetaData::getIndex).map(Index::getName) - .toArray(String[]::new); - - String[] indicesThatRequireAnUpdate; - try { - indicesThatRequireAnUpdate = mappingRequiresUpdate(state, concreteIndices, Version.CURRENT, logger); - } catch (IOException e) { - listener.onFailure(e); - return; - } - - if (indicesThatRequireAnUpdate.length > 0) { - try (XContentBuilder mapping = mappingSupplier.get()) { - PutMappingRequest putMappingRequest = new 
PutMappingRequest(indicesThatRequireAnUpdate); - putMappingRequest.type(ElasticsearchMappings.DOC_TYPE); - putMappingRequest.source(mapping); - executeAsyncWithOrigin(client, ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, - ActionListener.wrap(response -> { - if (response.isAcknowledged()) { - listener.onResponse(true); - } else { - listener.onFailure(new ElasticsearchException("Attempt to put missing mapping in indices " - + Arrays.toString(indicesThatRequireAnUpdate) + " was not acknowledged")); - } - }, listener::onFailure)); - } catch (IOException e) { - listener.onFailure(e); - } - } else { - logger.trace("Mappings are uptodate."); - listener.onResponse(true); - } - } - public static class OpenJobPersistentTasksExecutor extends PersistentTasksExecutor { private static final Logger logger = LogManager.getLogger(OpenJobPersistentTasksExecutor.class); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 9695d73ed05c5..dd3656ee04b67 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -40,6 +40,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; @@ -417,7 +418,9 @@ public void onFailure(Exception e) { public void openJob(JobTask jobTask, ClusterState clusterState, Consumer closeHandler) { String jobId = jobTask.getJobId(); logger.info("Opening job [{}]", jobId); - AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client, clusterState, ActionListener.wrap( + + // Start the process + ActionListener stateAliasHandler = ActionListener.wrap( r -> { jobManager.getJob(jobId, ActionListener.wrap( job -> { @@ -427,7 +430,6 @@ public void openJob(JobTask jobTask, ClusterState clusterState, Consumer { // We need to fork, otherwise we restore model state from a network thread (several GET api calls): @@ -477,7 +479,17 @@ protected void doRun() { closeHandler )); }, - closeHandler)); + closeHandler); + + // Make sure the state index and alias exist + ActionListener resultsMappingUpdateHandler = ActionListener.wrap( + ack -> AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client, clusterState, stateAliasHandler), + closeHandler + ); + + // Try adding the results doc mapping - this updates to the latest version if an old mapping is present + ElasticsearchMappings.addDocMappingIfMissing(AnomalyDetectorsIndex.jobResultsAliasedName(jobId), + ElasticsearchMappings::resultsMapping, client, clusterState, resultsMappingUpdateHandler); } private void createProcessAndSetRunning(ProcessContext processContext, Job job, AutodetectParams params, Consumer handler) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index 9bd32bdc9eff3..da54b33d27597 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -38,7 +37,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; @@ -53,7 +51,6 @@ import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; -import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.notifications.AuditorField; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; @@ -61,7 +58,6 @@ import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.junit.Before; -import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; @@ -486,59 +482,6 @@ public void testVerifyIndicesPrimaryShardsAreActive() { assertEquals(indexToRemove, result.get(0)); } - public void testMappingRequiresUpdateNoMapping() throws IOException { - ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); - ClusterState cs = csBuilder.build(); - String[] indices = new String[] { "no_index" }; - - assertArrayEquals(new String[] { "no_index" }, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); - } - - public void testMappingRequiresUpdateNullMapping() throws IOException { - ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("null_mapping", null)); - String[] indices = new String[] { "null_index" }; - assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); - } - - public void testMappingRequiresUpdateNoVersion() throws IOException { - ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("no_version_field", "NO_VERSION_FIELD")); - String[] indices = new String[] { "no_version_field" }; - assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); - } - - public void testMappingRequiresUpdateRecentMappingVersion() throws IOException { - ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_current", Version.CURRENT.toString())); - String[] indices = new String[] { "version_current" }; - assertArrayEquals(new String[] {}, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); - } - - public void 
testMappingRequiresUpdateMaliciousMappingVersion() throws IOException { - ClusterState cs = getClusterStateWithMappingsWithMetaData( - Collections.singletonMap("version_current", Collections.singletonMap("nested", "1.0"))); - String[] indices = new String[] { "version_nested" }; - assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); - } - - public void testMappingRequiresUpdateBogusMappingVersion() throws IOException { - ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_bogus", "0.0")); - String[] indices = new String[] { "version_bogus" }; - assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); - } - - public void testMappingRequiresUpdateNewerMappingVersion() throws IOException { - ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_newer", Version.CURRENT)); - String[] indices = new String[] { "version_newer" }; - assertArrayEquals(new String[] {}, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousVersion(), - logger)); - } - - public void testMappingRequiresUpdateNewerMappingVersionMinor() throws IOException { - ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_newer_minor", Version.CURRENT)); - String[] indices = new String[] { "version_newer_minor" }; - assertArrayEquals(new String[] {}, - TransportOpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousMinorVersion(), logger)); - } - public void testNodeNameAndVersion() { TransportAddress ta = new TransportAddress(InetAddress.getLoopbackAddress(), 9300); Map attributes = new HashMap<>(); @@ -641,42 +584,6 @@ private void addIndices(MetaData.Builder metaData, RoutingTable.Builder routingT } } - private ClusterState getClusterStateWithMappingsWithMetaData(Map namesAndVersions) throws IOException { - MetaData.Builder metaDataBuilder = MetaData.builder(); - - for (Map.Entry entry : namesAndVersions.entrySet()) { - - String indexName = entry.getKey(); - Object version = entry.getValue(); - - IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); - indexMetaData.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); - - Map mapping = new HashMap<>(); - Map properties = new HashMap<>(); - for (int i = 0; i < 10; i++) { - properties.put("field" + i, Collections.singletonMap("type", "string")); - } - mapping.put("properties", properties); - - Map meta = new HashMap<>(); - if (version != null && version.equals("NO_VERSION_FIELD") == false) { - meta.put("version", version); - } - mapping.put("_meta", meta); - - indexMetaData.putMapping(new MappingMetaData(ElasticsearchMappings.DOC_TYPE, mapping)); - - metaDataBuilder.put(indexMetaData); - } - MetaData metaData = metaDataBuilder.build(); - - ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); - csBuilder.metaData(metaData); - return csBuilder.build(); - } - private static Job jobWithRules(String jobId) { DetectionRule rule = new DetectionRule.Builder(Collections.singletonList( new RuleCondition(RuleCondition.AppliesTo.TYPICAL, Operator.LT, 100.0) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index 9024d0edcee9c..ba319f1a90781 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -141,6 +141,7 @@ public void setup() throws Exception { when(metaData.getAliasAndIndexLookup()).thenReturn(aliasOrIndexSortedMap); clusterState = mock(ClusterState.class); when(clusterState.getMetaData()).thenReturn(metaData); + when(clusterState.metaData()).thenReturn(metaData); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java new file mode 100644 index 0000000000000..5602f14ef2267 --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.upgrades; + +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ml.job.config.AnalysisConfig; +import org.elasticsearch.client.ml.job.config.DataDescription; +import org.elasticsearch.client.ml.job.config.Detector; +import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.test.rest.XPackRestTestHelper; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class MlMappingsUpgradeIT extends AbstractUpgradeTestCase { + + private static final String JOB_ID = "ml-mappings-upgrade-job"; + + @Override + protected Collection templatesToWaitFor() { + return Stream.concat(XPackRestTestHelper.ML_POST_V660_TEMPLATES.stream(), + super.templatesToWaitFor().stream()).collect(Collectors.toSet()); + } + + /** + * The purpose of this test is to ensure that when a job is open through a rolling upgrade we upgrade the results + * index mappings when it is assigned to an upgraded node even if no other ML endpoint is called after the upgrade + */ + public void testMappingsUpgrade() throws Exception { + + switch (CLUSTER_TYPE) { + case OLD: + createAndOpenTestJob(); + break; + case MIXED: + // We don't know whether the job is on an old or upgraded node, so cannot assert that the mappings have been upgraded + break; + case UPGRADED: + assertUpgradedMappings(); + break; + default: + throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); + } + } + + private void createAndOpenTestJob() throws IOException { + + Detector.Builder d = new Detector.Builder("metric", "responsetime"); + d.setByFieldName("airline"); + AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build())); + 
analysisConfig.setBucketSpan(TimeValue.timeValueMinutes(10)); + Job.Builder job = new Job.Builder(JOB_ID); + job.setAnalysisConfig(analysisConfig); + job.setDataDescription(new DataDescription.Builder()); + + Request putJob = new Request("PUT", "_ml/anomaly_detectors/" + JOB_ID); + putJob.setJsonEntity(Strings.toString(job.build())); + Response response = client().performRequest(putJob); + assertEquals(200, response.getStatusLine().getStatusCode()); + + Request openJob = new Request("POST", "_ml/anomaly_detectors/" + JOB_ID + "/_open"); + response = client().performRequest(openJob); + assertEquals(200, response.getStatusLine().getStatusCode()); + } + + @SuppressWarnings("unchecked") + private void assertUpgradedMappings() throws Exception { + + assertBusy(() -> { + Request getMappings = new Request("GET", AnomalyDetectorsIndex.resultsWriteAlias(JOB_ID) + "/_mappings"); + Response response = client().performRequest(getMappings); + + Map responseLevel = entityAsMap(response); + assertNotNull(responseLevel); + Map indexLevel = (Map) responseLevel.get(".ml-anomalies-shared"); + assertNotNull(indexLevel); + Map mappingsLevel = (Map) indexLevel.get("mappings"); + assertNotNull(mappingsLevel); + Map metaLevel = (Map) mappingsLevel.get("_meta"); + assertEquals(Collections.singletonMap("version", Version.CURRENT.toString()), metaLevel); + Map propertiesLevel = (Map) mappingsLevel.get("properties"); + assertNotNull(propertiesLevel); + // TODO: as the years go by, the field we assert on here should be changed + // to the most recent field we've added that is NOT of type "keyword" + Map fieldLevel = (Map) propertiesLevel.get("multi_bucket_impact"); + assertEquals(Collections.singletonMap("type", "double"), fieldLevel); + }); + } +} From daa2ec8a605d385a65b9ab3e89d016b3fd0dffe2 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 23 Jan 2019 10:40:05 +0100 Subject: [PATCH 15/24] Switch mapping/aggregations over to java time (#36363) This commit moves the aggregation and mapping code from joda time to java time. This includes field mappers, root object mappers, aggregations with date histograms, query builders and a lot of changes within tests. The cut-over to java time is a requirement so that we can support nanoseconds properly in a future field mapper. 
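A hedged before/after sketch of the mechanical change repeated across these files (illustrative only; the production code goes through the `DateFormatter` and `DateUtils` abstractions rather than calling `java.time` directly):

    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;

    public class JodaToJavaTime {
        public static void main(String[] args) {
            long epochMillis = 1548236285000L; // arbitrary example timestamp

            // Joda-Time (before):
            //   new DateTime(epochMillis, DateTimeZone.forID("Europe/Berlin")).toString();

            // java.time (after): ZoneId replaces DateTimeZone, and a
            // DateTimeFormatter with a zone override renders the instant
            String formatted = DateTimeFormatter.ISO_OFFSET_DATE_TIME
                    .withZone(ZoneId.of("Europe/Berlin"))
                    .format(Instant.ofEpochMilli(epochMillis));
            System.out.println(formatted);
        }
    }
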
Relates #27330 --- .../time/DateFormatterBenchmark.java | 1 - .../index/mapper/DenseVectorFieldMapper.java | 4 +- .../index/mapper/ScaledFloatFieldMapper.java | 4 +- .../index/mapper/SparseVectorFieldMapper.java | 4 +- .../ICUCollationKeywordFieldMapper.java | 4 +- .../60_pipeline_timestamp_date_mapping.yml | 2 +- ...g.yml => 180_locale_dependent_mapping.yml} | 0 .../cluster/metadata/IndexGraveyard.java | 3 +- .../metadata/IndexNameExpressionResolver.java | 13 +- .../cluster/routing/UnassignedInfo.java | 3 +- .../org/elasticsearch/common/Rounding.java | 20 +- .../java/org/elasticsearch/common/Table.java | 3 +- .../common/io/stream/StreamInput.java | 17 + .../common/io/stream/StreamOutput.java | 21 +- .../common/joda/JodaDateFormatter.java | 21 +- .../common/joda/JodaDateMathParser.java | 7 +- .../common/time/DateFormatter.java | 17 +- .../common/time/DateFormatters.java | 149 +++- .../common/time/DateMathParser.java | 9 +- .../elasticsearch/common/time/DateUtils.java | 10 +- .../elasticsearch/common/time/EpochTime.java | 26 +- .../common/time/JavaDateFormatter.java | 18 +- .../common/time/JavaDateMathParser.java | 32 +- .../XContentElasticsearchExtension.java | 7 +- .../index/mapper/BinaryFieldMapper.java | 5 +- .../index/mapper/BooleanFieldMapper.java | 4 +- .../index/mapper/DateFieldMapper.java | 85 +- .../index/mapper/DocumentParser.java | 12 +- .../index/mapper/IpFieldMapper.java | 4 +- .../index/mapper/MappedFieldType.java | 14 +- .../index/mapper/NumberFieldMapper.java | 4 +- .../index/mapper/RangeFieldMapper.java | 53 +- .../index/mapper/RootObjectMapper.java | 6 +- .../index/mapper/SimpleMappedFieldType.java | 7 +- .../index/mapper/TypeParsers.java | 1 - .../index/query/QueryStringQueryBuilder.java | 22 +- .../index/query/RangeQueryBuilder.java | 65 +- .../index/search/QueryStringQueryParser.java | 6 +- .../elasticsearch/ingest/IngestDocument.java | 6 +- .../elasticsearch/monitor/jvm/HotThreads.java | 3 +- .../rest/action/cat/RestIndicesAction.java | 5 +- .../rest/action/cat/RestSnapshotAction.java | 3 +- .../rest/action/cat/RestTasksAction.java | 3 +- .../script/JodaCompatibleZonedDateTime.java | 3 +- .../script/ScoreScriptUtils.java | 6 +- .../elasticsearch/search/DocValueFormat.java | 31 +- .../DateHistogramValuesSourceBuilder.java | 34 +- .../composite/RoundingValuesSource.java | 2 +- .../AutoDateHistogramAggregationBuilder.java | 29 +- .../AutoDateHistogramAggregator.java | 2 +- .../DateHistogramAggregationBuilder.java | 89 +- .../histogram/DateHistogramAggregator.java | 2 +- .../DateHistogramAggregatorFactory.java | 2 +- .../bucket/histogram/ExtendedBounds.java | 2 +- .../histogram/InternalAutoDateHistogram.java | 8 +- .../histogram/InternalDateHistogram.java | 12 +- .../histogram/ParsedAutoDateHistogram.java | 6 +- .../bucket/histogram/ParsedDateHistogram.java | 6 +- .../range/DateRangeAggregationBuilder.java | 24 +- .../bucket/range/InternalDateRange.java | 10 +- .../bucket/range/ParsedDateRange.java | 9 +- .../DerivativePipelineAggregationBuilder.java | 7 +- .../support/MultiValuesSourceFieldConfig.java | 35 +- .../aggregations/support/ValueType.java | 4 +- .../ValuesSourceAggregationBuilder.java | 24 +- .../support/ValuesSourceConfig.java | 18 +- .../support/ValuesSourceParserHelper.java | 8 +- .../elasticsearch/snapshots/SnapshotInfo.java | 3 +- .../admin/indices/rollover/RolloverIT.java | 14 +- .../DateMathExpressionResolverTests.java | 14 +- .../elasticsearch/common/RoundingTests.java | 5 +- .../joda/JavaJodaTimeDuellingTests.java | 94 +- 
.../common/joda/JodaDateMathParserTests.java | 19 +- .../elasticsearch/common/joda/JodaTests.java | 29 +- .../common/joda/SimpleJodaTests.java | 800 ------------------ .../common/rounding/RoundingDuelTests.java | 3 + .../common/time/DateFormattersTests.java | 59 +- .../common/time/JavaDateMathParserTests.java | 30 +- .../index/mapper/DateFieldMapperTests.java | 64 +- .../index/mapper/DateFieldTypeTests.java | 37 +- .../index/mapper/DynamicMappingTests.java | 3 +- .../index/mapper/DynamicTemplatesTests.java | 2 - .../index/mapper/RangeFieldMapperTests.java | 2 +- ...angeFieldQueryStringQueryBuilderTests.java | 7 +- .../index/mapper/RangeFieldTypeTests.java | 11 +- .../query/QueryStringQueryBuilderTests.java | 15 +- .../index/query/RangeQueryBuilderTests.java | 18 +- .../indices/IndicesRequestCacheIT.java | 26 +- .../search/DocValueFormatTests.java | 8 +- .../bucket/AutoDateHistogramTests.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 344 ++++---- .../bucket/DateHistogramOffsetIT.java | 41 +- .../aggregations/bucket/DateRangeIT.java | 262 +++--- .../aggregations/bucket/DateRangeTests.java | 2 +- .../aggregations/bucket/MinDocCountIT.java | 5 +- .../CompositeAggregationBuilderTests.java | 2 +- .../composite/CompositeAggregatorTests.java | 12 +- .../composite/InternalCompositeTests.java | 4 +- .../AutoDateHistogramAggregatorTests.java | 256 +++--- .../DateHistogramAggregatorTests.java | 3 +- .../bucket/histogram/DateHistogramTests.java | 13 +- .../bucket/histogram/ExtendedBoundsTests.java | 24 +- .../InternalAutoDateHistogramTests.java | 26 +- .../histogram/InternalDateHistogramTests.java | 8 +- .../metrics/WeightedAvgAggregatorTests.java | 6 +- .../pipeline/AvgBucketAggregatorTests.java | 7 +- .../aggregations/pipeline/BucketSortIT.java | 8 +- .../CumulativeSumAggregatorTests.java | 3 +- .../pipeline/DateDerivativeIT.java | 203 +++-- .../aggregations/pipeline/MovFnUnitTests.java | 3 +- .../PipelineAggregationHelperTests.java | 2 +- .../MultiValuesSourceFieldConfigTests.java | 4 +- .../highlight/HighlighterSearchIT.java | 10 +- .../search/fields/SearchFieldsIT.java | 14 +- .../search/query/SearchQueryIT.java | 78 +- .../test/AbstractSerializingTestCase.java | 7 + .../license/licensor/TestUtils.java | 10 +- .../org/elasticsearch/license/DateUtils.java | 28 +- .../ml/action/GetOverallBucketsAction.java | 2 +- .../core/ml/action/StartDatafeedAction.java | 2 +- .../ml/datafeed/extractor/ExtractorUtils.java | 10 +- .../xpack/core/ml/utils/time/TimeUtils.java | 4 +- .../rollup/job/DateHistogramGroupConfig.java | 20 +- .../watcher/support/WatcherDateTimeUtils.java | 2 +- .../RewriteCachingDirectoryReaderTests.java | 8 +- .../org/elasticsearch/license/TestUtils.java | 5 +- .../core/ml/datafeed/DatafeedConfigTests.java | 10 +- .../extractor/ExtractorUtilsTests.java | 15 +- .../xpack/core/rollup/ConfigTestHelpers.java | 4 +- ...eHistogramGroupConfigSerializingTests.java | 12 +- .../ml/transforms/PainlessDomainSplitIT.java | 18 +- .../xpack/ml/MlDailyMaintenanceService.java | 15 +- .../xpack/ml/datafeed/DatafeedJob.java | 1 - .../DatafeedDelayedDataDetector.java | 8 +- .../AggregationToJsonProcessor.java | 10 +- .../extractor/fields/ExtractedField.java | 3 - .../OverallBucketsProvider.java | 6 +- .../AbstractExpiredJobDataRemover.java | 6 +- .../retention/ExpiredForecastsRemover.java | 6 +- .../extractor/fields/ExtractedFieldTests.java | 10 +- .../fields/TimeBasedExtractedFieldsTests.java | 8 - .../TimestampFormatFinderTests.java | 4 +- .../AutodetectResultProcessorIT.java | 15 +- 
.../ml/job/results/AutodetectResultTests.java | 21 +- .../xpack/ml/job/results/BucketTests.java | 4 +- .../xpack/ml/job/results/ForecastTests.java | 2 +- .../xpack/ml/job/results/ModelPlotTests.java | 8 +- .../ml/job/results/OverallBucketTests.java | 4 +- .../xpack/monitoring/MonitoringTestUtils.java | 3 +- .../local/LocalExporterIntegTests.java | 2 +- .../rollup/RollupJobIdentifierUtils.java | 17 +- .../xpack/rollup/job/RollupIndexer.java | 5 +- .../rollup/RollupJobIdentifierUtilTests.java | 5 +- .../rollup/action/SearchActionTests.java | 2 +- .../xpack/rollup/config/ConfigTests.java | 5 +- .../job/RollupIndexerIndexingTests.java | 61 +- .../querydsl/agg/GroupByDateHistogram.java | 3 +- 157 files changed, 1823 insertions(+), 2249 deletions(-) rename rest-api-spec/src/main/resources/rest-api-spec/test/search/{180_local_dependent_mapping.yml => 180_locale_dependent_mapping.yml} (100%) delete mode 100644 server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java index b30b3ada0ab64..a364a331400a5 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/time/DateFormatterBenchmark.java @@ -55,4 +55,3 @@ public TemporalAccessor parseJodaDate() { return jodaFormatter.parse("1234567890"); } } - diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java index fdcb1f54ea7dd..7beddc13ca598 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/DenseVectorFieldMapper.java @@ -31,9 +31,9 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.List; import java.util.Map; @@ -107,7 +107,7 @@ public String typeName() { } @Override - public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(String format, ZoneId timeZone) { throw new UnsupportedOperationException( "Field [" + name() + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations"); } diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index d3719ec884fa1..38d635ab3939f 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -59,10 +59,10 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; -import org.joda.time.DateTimeZone; import java.io.IOException; import java.math.BigDecimal; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -301,7 +301,7 @@ public Object valueForDisplay(Object value) { } @Override - public 
DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(String format, ZoneId timeZone) { if (timeZone != null) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java index 2eb360255d070..f7288d5039390 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/SparseVectorFieldMapper.java @@ -31,9 +31,9 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.List; import java.util.Map; @@ -107,7 +107,7 @@ public String typeName() { } @Override - public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(String format, ZoneId timeZone) { throw new UnsupportedOperationException( "Field [" + name() + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations"); } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index d4cc9ee9d6e89..a228283527d66 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -46,9 +46,9 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -208,7 +208,7 @@ public BytesRef parseBytesRef(String value) { }; @Override - public DocValueFormat docValueFormat(final String format, final DateTimeZone timeZone) { + public DocValueFormat docValueFormat(final String format, final ZoneId timeZone) { return COLLATE_FORMAT; } } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml index 0f8b5517dd4d2..ea0984ef3bcbf 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml @@ -9,7 +9,7 @@ index: timetest body: mappings: - test: { "properties": { "my_time": {"type": "date"}}} + test: { "properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}} - do: ingest.put_pipeline: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/180_local_dependent_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/180_locale_dependent_mapping.yml 
similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/search/180_local_dependent_mapping.yml rename to rest-api-spec/src/main/resources/rest-api-spec/test/search/180_locale_dependent_mapping.yml diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index 04e372a5f91de..ce93b27d770c1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.xcontent.ContextParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -368,7 +367,7 @@ public static final class Tombstone implements ToXContentObject, Writeable { TOMBSTONE_PARSER.declareString((b, s) -> {}, new ParseField(DELETE_DATE_KEY)); } - static final DateFormatter FORMATTER = DateFormatters.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC); + static final DateFormatter FORMATTER = DateFormatter.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC); static ContextParser getParser() { return (parser, context) -> TOMBSTONE_PARSER.apply(parser, null).build(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index f60866383107a..050d97ba54cf0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -28,8 +28,8 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -819,7 +819,7 @@ private static List resolveEmptyOrTrivialWildcard(IndicesOptions options static final class DateMathExpressionResolver implements ExpressionResolver { - private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatters.forPattern("uuuu.MM.dd"); + private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd"); private static final String EXPRESSION_LEFT_BOUND = "<"; private static final String EXPRESSION_RIGHT_BOUND = ">"; private static final char LEFT_BOUND = '{'; @@ -912,18 +912,19 @@ String resolveExpression(String expression, final Context context) { int formatPatternTimeZoneSeparatorIndex = patternAndTZid.indexOf(TIME_ZONE_BOUND); if (formatPatternTimeZoneSeparatorIndex != -1) { dateFormatterPattern = patternAndTZid.substring(0, formatPatternTimeZoneSeparatorIndex); - timeZone = ZoneId.of(patternAndTZid.substring(formatPatternTimeZoneSeparatorIndex + 1)); + timeZone = DateUtils.of(patternAndTZid.substring(formatPatternTimeZoneSeparatorIndex + 1)); } else { dateFormatterPattern = patternAndTZid; timeZone = ZoneOffset.UTC; } - dateFormatter = 
DateFormatters.forPattern(dateFormatterPattern); + dateFormatter = DateFormatter.forPattern(dateFormatterPattern); } + DateFormatter formatter = dateFormatter.withZone(timeZone); DateMathParser dateMathParser = formatter.toDateMathParser(); - long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone); + Instant instant = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone); - String time = formatter.format(Instant.ofEpochMilli(millis)); + String time = formatter.format(instant); beforePlaceHolderSb.append(time); inPlaceHolderSb = new StringBuilder(); inPlaceHolder = false; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java index 21885d1788c7e..f8afbeb449361 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -48,7 +47,7 @@ */ public final class UnassignedInfo implements ToXContentFragment, Writeable { - public static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC); + public static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC); public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = Setting.positiveTimeSetting("index.unassigned.node_left.delayed_timeout", TimeValue.timeValueMinutes(1), Property.Dynamic, diff --git a/server/src/main/java/org/elasticsearch/common/Rounding.java b/server/src/main/java/org/elasticsearch/common/Rounding.java index 593964f61e93f..dab29c88634e9 100644 --- a/server/src/main/java/org/elasticsearch/common/Rounding.java +++ b/server/src/main/java/org/elasticsearch/common/Rounding.java @@ -19,9 +19,11 @@ package org.elasticsearch.common; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; @@ -188,7 +190,7 @@ static class TimeUnitRounding extends Rounding { TimeUnitRounding(StreamInput in) throws IOException { unit = DateTimeUnit.resolve(in.readByte()); - timeZone = ZoneId.of(in.readString()); + timeZone = DateUtils.of(in.readString()); unitRoundsToMidnight = unit.getField().getBaseUnit().getDuration().toMillis() > 60L * 60L * 1000L; } @@ -367,8 +369,11 @@ public long nextRoundingValue(long utcMillis) { @Override public void innerWriteTo(StreamOutput out) throws IOException { out.writeByte(unit.getId()); - String tz = ZoneOffset.UTC.equals(timeZone) ? 
"UTC" : timeZone.getId(); // stay joda compatible - out.writeString(tz); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeString(timeZone.getId()); + } else { + out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID()); + } } @Override @@ -417,7 +422,7 @@ public String toString() { TimeIntervalRounding(StreamInput in) throws IOException { interval = in.readVLong(); - timeZone = ZoneId.of(in.readString()); + timeZone = DateUtils.of(in.readString()); } @Override @@ -490,8 +495,11 @@ public long nextRoundingValue(long time) { @Override public void innerWriteTo(StreamOutput out) throws IOException { out.writeVLong(interval); - String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId(); // stay joda compatible - out.writeString(tz); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeString(timeZone.getId()); + } else { + out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID()); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/common/Table.java b/server/src/main/java/org/elasticsearch/common/Table.java index a41fd267329ff..d097783a838f3 100644 --- a/server/src/main/java/org/elasticsearch/common/Table.java +++ b/server/src/main/java/org/elasticsearch/common/Table.java @@ -20,7 +20,6 @@ package org.elasticsearch.common; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import java.time.Instant; import java.time.ZoneOffset; @@ -85,7 +84,7 @@ public Table endHeaders() { return this; } - private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); + private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); public Table startRow() { if (headers.isEmpty()) { diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index fd9ffdfd31d16..7759e13e536b7 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -653,6 +653,23 @@ public DateTimeZone readOptionalTimeZone() throws IOException { return null; } + /** + * Read a {@linkplain DateTimeZone}. + */ + public ZoneId readZoneId() throws IOException { + return ZoneId.of(readString()); + } + + /** + * Read an optional {@linkplain ZoneId}. 
+ */ + public ZoneId readOptionalZoneId() throws IOException { + if (readBoolean()) { + return ZoneId.of(readString()); + } + return null; + } + public int[] readIntArray() throws IOException { int length = readArraySize(); int[] values = new int[length]; diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 8131335602693..699713cb0f836 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -55,6 +55,7 @@ import java.nio.file.FileSystemLoopException; import java.nio.file.NoSuchFileException; import java.nio.file.NotDirectoryException; +import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Collection; import java.util.Collections; @@ -677,7 +678,6 @@ public final void writeMap(final Map map, final Writer keyWriter writers.put(ZonedDateTime.class, (o, v) -> { o.writeByte((byte) 23); final ZonedDateTime zonedDateTime = (ZonedDateTime) v; - zonedDateTime.getZone().getId(); o.writeString(zonedDateTime.getZone().getId()); o.writeLong(zonedDateTime.toInstant().toEpochMilli()); }); @@ -988,6 +988,13 @@ public void writeTimeZone(DateTimeZone timeZone) throws IOException { writeString(timeZone.getID()); } + /** + * Write a {@linkplain ZoneId} to the stream. + */ + public void writeZoneId(ZoneId timeZone) throws IOException { + writeString(timeZone.getId()); + } + /** * Write an optional {@linkplain DateTimeZone} to the stream. */ @@ -1000,6 +1007,18 @@ public void writeOptionalTimeZone(@Nullable DateTimeZone timeZone) throws IOExce } } + /** + * Write an optional {@linkplain ZoneId} to the stream. + */ + public void writeOptionalZoneId(@Nullable ZoneId timeZone) throws IOException { + if (timeZone == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writeZoneId(timeZone); + } + } + /** * Writes a list of {@link Streamable} objects */ diff --git a/server/src/main/java/org/elasticsearch/common/joda/JodaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/joda/JodaDateFormatter.java index 5db95b12bb437..706e995530962 100644 --- a/server/src/main/java/org/elasticsearch/common/joda/JodaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/joda/JodaDateFormatter.java @@ -31,12 +31,12 @@ import java.time.ZonedDateTime; import java.time.temporal.TemporalAccessor; import java.util.Locale; +import java.util.Objects; public class JodaDateFormatter implements DateFormatter { - final String pattern; + final String pattern; final DateTimeFormatter parser; - final DateTimeFormatter printer; public JodaDateFormatter(String pattern, DateTimeFormatter parser, DateTimeFormatter printer) { @@ -108,4 +108,21 @@ public ZoneId zone() { public DateMathParser toDateMathParser() { return new JodaDateMathParser(this); } + + @Override + public int hashCode() { + return Objects.hash(locale(), zone(), pattern()); + } + + @Override + public boolean equals(Object obj) { + if (obj.getClass().equals(this.getClass()) == false) { + return false; + } + JodaDateFormatter other = (JodaDateFormatter) obj; + + return Objects.equals(pattern(), other.pattern()) && + Objects.equals(locale(), other.locale()) && + Objects.equals(zone(), other.zone()); + } } diff --git a/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java index b86af7a75a55f..b7522c6a3233e 
100644 --- a/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java +++ b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java @@ -26,6 +26,7 @@ import org.joda.time.MutableDateTime; import org.joda.time.format.DateTimeFormatter; +import java.time.Instant; import java.time.ZoneId; import java.util.Objects; import java.util.function.LongSupplier; @@ -50,7 +51,7 @@ public JodaDateMathParser(JodaDateFormatter dateTimeFormatter) { // if it has been used. For instance, the request cache does not cache requests that make // use of `now`. @Override - public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) { + public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) { final DateTimeZone timeZone = tz == null ? null : DateUtils.zoneIdToDateTimeZone(tz); long time; String mathString; @@ -64,13 +65,13 @@ public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) { } else { int index = text.indexOf("||"); if (index == -1) { - return parseDateTime(text, timeZone, roundUp); + return Instant.ofEpochMilli(parseDateTime(text, timeZone, roundUp)); } time = parseDateTime(text.substring(0, index), timeZone, false); mathString = text.substring(index + 2); } - return parseMath(mathString, time, roundUp, timeZone); + return Instant.ofEpochMilli(parseMath(mathString, time, roundUp, timeZone)); } private long parseMath(String mathString, long time, boolean roundUp, DateTimeZone timeZone) throws ElasticsearchParseException { diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java index 8d83aa30b3587..aeea14ee1f011 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.time; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.joda.Joda; import org.joda.time.DateTime; import java.time.Instant; @@ -87,7 +86,8 @@ default DateTime parseJoda(String input) { * Return the given millis-since-epoch formatted with this format. */ default String formatMillis(long millis) { - return format(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC)); + ZoneId zone = zone() != null ? zone() : ZoneOffset.UTC; + return format(Instant.ofEpochMilli(millis).atZone(zone)); } /** @@ -121,7 +121,9 @@ default String formatJoda(DateTime dateTime) { ZoneId zone(); /** - * Return a {@link DateMathParser} built from this formatter. 
+ * Create a DateMathParser from the existing formatter + * + * @return The DateMathParser object */ DateMathParser toDateMathParser(); @@ -129,12 +131,11 @@ static DateFormatter forPattern(String input) { if (Strings.hasLength(input) == false) { throw new IllegalArgumentException("No date pattern provided"); } - if (input.startsWith("8") == false) { - return Joda.forPattern(input); - } - // dates starting with 8 will not be using joda but java time formatters - input = input.substring(1); + // support the 6.x BWC compatible way of parsing java 8 dates + if (input.startsWith("8")) { + input = input.substring(1); + } List formatters = new ArrayList<>(); for (String pattern : Strings.delimitedListToStringArray(input, "||")) { diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index de75356a58995..2e3c2953ec375 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -76,28 +76,53 @@ public class DateFormatters { private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER = new DateTimeFormatterBuilder() .append(STRICT_YEAR_MONTH_DAY_FORMATTER) .appendLiteral('T') - .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .appendFraction(NANO_OF_SECOND, 3, 9, true) + .optionalStart() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANO_OF_SECOND, 3, 3, true) + .optionalEnd() + .optionalEnd() .optionalStart() .appendZoneOrOffsetId() .optionalEnd() + .optionalEnd() + .optionalEnd() .toFormatter(Locale.ROOT); private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER = new DateTimeFormatterBuilder() .append(STRICT_YEAR_MONTH_DAY_FORMATTER) .optionalStart() .appendLiteral('T') - .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .optionalStart() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) .optionalStart() .appendFraction(NANO_OF_SECOND, 3, 3, true) .optionalEnd() .optionalStart() + .appendFraction(NANO_OF_SECOND, 3, 9, true) + .optionalEnd() + .optionalEnd() + .optionalStart() .appendZoneOrOffsetId() .optionalEnd() .optionalStart() .append(TIME_ZONE_FORMATTER_NO_COLON) .optionalEnd() .optionalEnd() + .optionalEnd() + .optionalEnd() .toFormatter(Locale.ROOT); /** @@ -123,11 +148,33 @@ public class DateFormatters { .optionalEnd() .toFormatter(Locale.ROOT); + private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS = new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral('T') + .optionalStart() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANO_OF_SECOND, 3, 9, true) + .optionalEnd() + .optionalEnd() + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalEnd() + .optionalEnd() + 
.toFormatter(Locale.ROOT); + /** * Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution. */ private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter("strict_date_optional_time_nanos", - STRICT_DATE_OPTIONAL_TIME_PRINTER, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS); + STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS); ///////////////////////////////////////// // @@ -329,31 +376,32 @@ public class DateFormatters { * Returns a basic formatter that combines a basic weekyear date and time * without millis, separated by a 'T' (xxxx'W'wwe'T'HHmmssX). */ - private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_basic_week_date_no_millis", - new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_PRINTER) - .appendLiteral("T") - .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) - .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .appendZoneOrOffsetId() - .toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_PRINTER) - .appendLiteral("T") - .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) - .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .appendZoneOrOffsetId() - .toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_PRINTER) - .appendLiteral("T") - .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) - .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER_NO_COLON) - .toFormatter(Locale.ROOT) + private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = + new JavaDateFormatter("strict_basic_week_date_time_no_millis", + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_PRINTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendZoneOrOffsetId() + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_PRINTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendZoneOrOffsetId() + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_PRINTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .append(TIME_ZONE_FORMATTER_NO_COLON) + .toFormatter(Locale.ROOT) ); /* @@ -389,7 +437,7 @@ public class DateFormatters { * An ISO date formatter that formats or parses a date without an offset, such as '2011-12-03'. */ private static final DateFormatter STRICT_DATE = new JavaDateFormatter("strict_date", - DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT)); + DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT).withLocale(Locale.ROOT)); /* * A date formatter that formats or parses a date plus an hour without an offset, such as '2011-12-03T01'. 
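All of the rebuilt formatters above depend on one java.time technique: nested optionalStart()/optionalEnd() pairs, where each section either matches completely or is skipped as a unit, letting a single parser accept anything from a bare date up to a fractional-second timestamp with zone. A self-contained sketch of the technique, deliberately simplified from the STRICT_DATE_OPTIONAL_TIME definitions above:

import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.SignStyle;
import java.util.Locale;

import static java.time.temporal.ChronoField.*;

public class OptionalSectionsDemo {
    public static void main(String[] args) {
        // Each optionalStart()/optionalEnd() pair nests inside the previous
        // one, so the time, minutes, seconds and fraction may each be absent.
        DateTimeFormatter f = new DateTimeFormatterBuilder()
            .appendValue(YEAR, 4).appendLiteral('-')
            .appendValue(MONTH_OF_YEAR, 2).appendLiteral('-')
            .appendValue(DAY_OF_MONTH, 2)
            .optionalStart().appendLiteral('T')
                .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
                .optionalStart().appendLiteral(':')
                    .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
                    .optionalStart().appendLiteral(':')
                        .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
                        .optionalStart()
                            .appendFraction(NANO_OF_SECOND, 3, 9, true)
                        .optionalEnd()
                    .optionalEnd()
                .optionalEnd()
            .optionalEnd()
            .toFormatter(Locale.ROOT);

        // All of these parse with the single formatter:
        for (String s : new String[] {"2011-12-03", "2011-12-03T10",
                "2011-12-03T10:15:30", "2011-12-03T10:15:30.123456789"}) {
            System.out.println(s + " -> " + f.parse(s));
        }
    }
}

Note the printer/parser split in the hunks above: the printer variants pin the fraction to exactly three digits (appendFraction(NANO_OF_SECOND, 3, 3, true)) so output stays at millisecond precision, while STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS allows up to nine digits for the new strict_date_optional_time_nanos format.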
@@ -514,7 +562,9 @@ public class DateFormatters { new JavaDateFormatter("strict_hour_minute_second_millis", STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER); - private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS; + private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = + new JavaDateFormatter("strict_hour_minute_second_fraction", + STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER); /* * Returns a formatter that combines a full date, two digit hour of day, @@ -537,7 +587,21 @@ public class DateFormatters { .toFormatter(Locale.ROOT) ); - private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION; + private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter( + "strict_date_hour_minute_second_millis", + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + // this one here is lenient as well to retain joda time based bwc compatibility + .appendFraction(NANO_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT) + ); /* * Returns a formatter for a two digit hour of day. (HH) @@ -782,14 +846,12 @@ public class DateFormatters { private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder() .appendValue(ChronoField.YEAR, 1, 5, SignStyle.NORMAL) - .optionalStart() .appendLiteral('-') .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) .optionalStart() .appendLiteral('-') .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE) .optionalEnd() - .optionalEnd() .toFormatter(Locale.ROOT); private static final DateTimeFormatter HOUR_MINUTE_FORMATTER = new DateTimeFormatterBuilder() @@ -928,7 +990,17 @@ public class DateFormatters { .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) .toFormatter(Locale.ROOT)); - private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = DATE_HOUR_MINUTE_SECOND_MILLIS; + private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter("date_hour_minute_second_fraction", + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); /* * Returns a formatter that combines a full date, two digit hour of day, @@ -1033,6 +1105,9 @@ public class DateFormatters { private static final DateFormatter HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter("hour_minute_second_millis", STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER); + private static final DateFormatter HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter("hour_minute_second_fraction", + STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER); + /* * Returns a formatter for a two digit hour of day and two digit minute of * hour. 
(HH:mm) @@ -1272,7 +1347,7 @@ public class DateFormatters { // ///////////////////////////////////////// - public static DateFormatter forPattern(String input) { + static DateFormatter forPattern(String input) { if (Strings.hasLength(input)) { input = input.trim(); } @@ -1331,7 +1406,7 @@ public static DateFormatter forPattern(String input) { } else if ("hourMinuteSecond".equals(input) || "hour_minute_second".equals(input)) { return HOUR_MINUTE_SECOND; } else if ("hourMinuteSecondFraction".equals(input) || "hour_minute_second_fraction".equals(input)) { - return HOUR_MINUTE_SECOND_MILLIS; + return HOUR_MINUTE_SECOND_FRACTION; } else if ("hourMinuteSecondMillis".equals(input) || "hour_minute_second_millis".equals(input)) { return HOUR_MINUTE_SECOND_MILLIS; } else if ("ordinalDate".equals(input) || "ordinal_date".equals(input)) { diff --git a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java index 1e997cce23be8..3ba392822ca0c 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java @@ -21,6 +21,7 @@ import org.joda.time.DateTimeZone; +import java.time.Instant; import java.time.ZoneId; import java.util.function.LongSupplier; @@ -32,7 +33,7 @@ public interface DateMathParser { /** * Parse a date math expression without timezone info and rounding down. */ - default long parse(String text, LongSupplier now) { + default Instant parse(String text, LongSupplier now) { return parse(text, now, false, (ZoneId) null); } @@ -42,7 +43,7 @@ default long parse(String text, LongSupplier now) { // exists for backcompat, do not use! @Deprecated - default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) { + default Instant parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) { return parse(text, now, roundUp, tz == null ? null : ZoneId.of(tz.getID())); } @@ -68,7 +69,7 @@ default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone * @param now a supplier to retrieve the current date in milliseconds, if needed for additions * @param roundUp should the result be rounded up * @param tz an optional timezone that should be applied before returning the milliseconds since the epoch - * @return the parsed date in milliseconds since the epoch + * @return the parsed date as an Instant since the epoch */ - long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz); + Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz); } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java index c46cee881a1a0..e913a69dca776 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java @@ -65,12 +65,16 @@ public static ZoneId dateTimeZoneToZoneId(DateTimeZone timeZone) { return ZoneOffset.UTC; } - String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(timeZone.getID()); + return of(timeZone.getID()); + } + + public static ZoneId of(String zoneId) { + String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(zoneId); if (deprecatedId != null) { deprecationLogger.deprecatedAndMaybeLog("timezone", - "Use of short timezone id " + timeZone.getID() + " is deprecated. Use " + deprecatedId + " instead"); + "Use of short timezone id " + zoneId + " is deprecated.
Use " + deprecatedId + " instead"); return ZoneId.of(deprecatedId); } - return ZoneId.of(timeZone.getID()); + return ZoneId.of(zoneId).normalized(); } } diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java index 7e0f17c5f6d9c..c824a7c7e7c35 100644 --- a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java +++ b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.time; +import org.elasticsearch.bootstrap.JavaVersion; + import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; import java.time.format.ResolverStyle; @@ -99,6 +101,10 @@ public TemporalAccessor resolve(Map fieldValues, } fieldValues.put(ChronoField.INSTANT_SECONDS, seconds); fieldValues.put(ChronoField.NANO_OF_SECOND, nanos); + // if there is already a milli of second, we need to overwrite it + if (fieldValues.containsKey(ChronoField.MILLI_OF_SECOND)) { + fieldValues.put(ChronoField.MILLI_OF_SECOND, nanos / 1_000_000); + } return null; } }; @@ -106,7 +112,8 @@ public TemporalAccessor resolve(Map fieldValues, private static final EpochField NANOS_OF_MILLI = new EpochField(ChronoUnit.NANOS, ChronoUnit.MILLIS, ValueRange.of(0, 999_999)) { @Override public boolean isSupportedBy(TemporalAccessor temporal) { - return temporal.isSupported(ChronoField.NANO_OF_SECOND) && temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000 != 0; + return temporal.isSupported(ChronoField.INSTANT_SECONDS) && temporal.isSupported(ChronoField.NANO_OF_SECOND) + && temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000 != 0; } @Override public long getFrom(TemporalAccessor temporal) { @@ -156,9 +163,20 @@ public long getFrom(TemporalAccessor temporal) { builder -> builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L), SECONDS_FORMATTER1, SECONDS_FORMATTER2, SECONDS_FORMATTER3); - static final DateFormatter MILLIS_FORMATTER = new JavaDateFormatter("epoch_millis", MILLISECONDS_FORMATTER3, - builder -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L), - MILLISECONDS_FORMATTER1, MILLISECONDS_FORMATTER2, MILLISECONDS_FORMATTER3); + static final DateFormatter MILLIS_FORMATTER = getEpochMillisFormatter(); + + private static DateFormatter getEpochMillisFormatter() { + // the third formatter fails under java 8 as a printer, so fall back to this one + final DateTimeFormatter printer; + if (JavaVersion.current().getVersion().get(0) == 8) { + printer = MILLISECONDS_FORMATTER1; + } else { + printer = MILLISECONDS_FORMATTER3; + } + return new JavaDateFormatter("epoch_millis", printer, + builder -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L), + MILLISECONDS_FORMATTER1, MILLISECONDS_FORMATTER2, MILLISECONDS_FORMATTER3); + } private abstract static class EpochField implements TemporalField { diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java index 20ef593a32610..bcdf9cbdcf674 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java @@ -24,6 +24,7 @@ import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; import java.time.temporal.ChronoField; import java.time.temporal.TemporalAccessor; import 
java.time.temporal.TemporalField; @@ -76,6 +77,8 @@ private JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeForm if (distinctLocales > 1) { throw new IllegalArgumentException("formatters must have the same locale"); } + this.printer = printer; + this.format = format; if (parsers.length == 0) { this.parser = printer; } else if (parsers.length == 1) { @@ -87,11 +90,11 @@ private JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeForm } this.parser = builder.toFormatter(Locale.ROOT); } - this.format = format; - this.printer = printer; DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); - builder.append(this.parser); + if (format.contains("||") == false) { + builder.append(this.parser); + } roundupParserConsumer.accept(builder); DateTimeFormatter roundupFormatter = builder.toFormatter(parser.getLocale()); if (printer.getZone() != null) { @@ -117,7 +120,12 @@ public TemporalAccessor parse(String input) { if (Strings.isNullOrEmpty(input)) { throw new IllegalArgumentException("cannot parse empty date"); } - return parser.parse(input); + + try { + return parser.parse(input); + } catch (DateTimeParseException e) { + throw new IllegalArgumentException("failed to parse date field [" + input + "] with format [" + format + "]", e); + } } @Override @@ -162,7 +170,7 @@ public ZoneId zone() { @Override public DateMathParser toDateMathParser() { - return new JavaDateMathParser(parser, roundupParser); + return new JavaDateMathParser(format, parser, roundupParser); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java index 3c9a1615a6c01..9ee390ba391a7 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java @@ -22,7 +22,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; -import java.time.DateTimeException; import java.time.DayOfWeek; import java.time.Instant; import java.time.LocalTime; @@ -30,6 +29,7 @@ import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; import java.time.temporal.ChronoField; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalAdjusters; @@ -48,20 +48,23 @@ public class JavaDateMathParser implements DateMathParser { private final DateTimeFormatter formatter; private final DateTimeFormatter roundUpFormatter; + private final String format; - public JavaDateMathParser(DateTimeFormatter formatter, DateTimeFormatter roundUpFormatter) { + JavaDateMathParser(String format, DateTimeFormatter formatter, DateTimeFormatter roundUpFormatter) { + this.format = format; Objects.requireNonNull(formatter); this.formatter = formatter; this.roundUpFormatter = roundUpFormatter; } @Override - public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) { - long time; + public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) { + Instant time; String mathString; if (text.startsWith("now")) { try { - time = now.getAsLong(); + // TODO only millisecond granularity here! 
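The TODO above marks a real limitation: Instant.ofEpochMilli(now.getAsLong()) clamps the "now" anchor to millisecond resolution even though the method now returns a full Instant. The math evaluation that follows the anchor works on a ZonedDateTime and converts back at the end; a rough standalone approximation of one hard-coded step (the roundUp=false equivalent of "now-1d/d", assuming UTC when no zone is given; the real parser handles the whole expression grammar):

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;

public class DateMathSketch {
    // Shift by the math offset, round down in the target zone, then hand
    // back an Instant -- the new return type of DateMathParser.parse.
    static Instant nowMinusOneDayRoundedDown(Instant now, ZoneId tz) {
        ZonedDateTime dt = ZonedDateTime.ofInstant(now, tz == null ? ZoneOffset.UTC : tz);
        return dt.minusDays(1).truncatedTo(ChronoUnit.DAYS).toInstant();
    }

    public static void main(String[] args) {
        Instant now = Instant.parse("2019-01-22T17:38:55.123Z");
        // prints 2019-01-21T00:00:00Z
        System.out.println(nowMinusOneDayRoundedDown(now, ZoneOffset.UTC));
    }
}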
+ time = Instant.ofEpochMilli(now.getAsLong()); } catch (Exception e) { throw new ElasticsearchParseException("could not read the current timestamp", e); } @@ -78,12 +81,12 @@ public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZon return parseMath(mathString, time, roundUp, timeZone); } - private long parseMath(final String mathString, final long time, final boolean roundUp, + private Instant parseMath(final String mathString, final Instant time, final boolean roundUp, ZoneId timeZone) throws ElasticsearchParseException { if (timeZone == null) { timeZone = ZoneOffset.UTC; } - ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone); + ZonedDateTime dateTime = ZonedDateTime.ofInstant(time, timeZone); for (int i = 0; i < mathString.length(); ) { char c = mathString.charAt(i++); final boolean round; @@ -204,18 +207,18 @@ private long parseMath(final String mathString, final long time, final boolean r dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit()); } } - return dateTime.toInstant().toEpochMilli(); + return dateTime.toInstant(); } - private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) { + private Instant parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) { if (Strings.isNullOrEmpty(value)) { - throw new IllegalArgumentException("cannot parse empty date"); + throw new ElasticsearchParseException("cannot parse empty date"); } DateTimeFormatter formatter = roundUpIfNoTime ? this.roundUpFormatter : this.formatter; try { if (timeZone == null) { - return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli(); + return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant(); } else { TemporalAccessor accessor = formatter.parse(value); ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor); @@ -223,10 +226,11 @@ private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTim timeZone = zoneId; } - return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli(); + return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant(); } - } catch (IllegalArgumentException | DateTimeException e) { - throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage()); + } catch (DateTimeParseException e) { + throw new ElasticsearchParseException("failed to parse date field [{}] with format [{}]: [{}]", + e, value, format, e.getMessage()); } } } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java index f32ba715a8068..3f731b73dc870 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentElasticsearchExtension.java @@ -22,7 +22,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.script.JodaCompatibleZonedDateTime; @@ -65,9 +64,9 @@ public class XContentElasticsearchExtension implements XContentBuilderExtension { public static final DateTimeFormatter DEFAULT_DATE_PRINTER = 
ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC); - public static final DateFormatter DEFAULT_FORMATTER = DateFormatters.forPattern("strict_date_optional_time_nanos"); - public static final DateFormatter LOCAL_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSS"); - public static final DateFormatter OFFSET_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSSZZZZZ"); + public static final DateFormatter DEFAULT_FORMATTER = DateFormatter.forPattern("strict_date_optional_time_nanos"); + public static final DateFormatter LOCAL_TIME_FORMATTER = DateFormatter.forPattern("HH:mm:ss.SSS"); + public static final DateFormatter OFFSET_TIME_FORMATTER = DateFormatter.forPattern("HH:mm:ss.SSSZZZZZ"); @Override public Map, XContentBuilder.Writer> getXContentWriters() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index 69b6a6e04a936..7a5bd97770297 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectArrayList; - import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -41,9 +40,9 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.Base64; import java.util.List; import java.util.Map; @@ -108,7 +107,7 @@ public String typeName() { } @Override - public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(String format, ZoneId timeZone) { return DocValueFormat.BINARY; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 9e0b9f62acbe7..caf8baac24da1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -40,9 +40,9 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -190,7 +190,7 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) { } @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { if (format != null) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 1e17aab31605b..0dcf52d5e54f2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -33,13 +33,15 @@ import org.apache.lucene.search.Query; 
import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.time.DateMathParser; -import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -49,18 +51,17 @@ import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; +import java.time.ZoneOffset; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter; - -/** A {@link FieldMapper} for ip addresses. */ +/** A {@link FieldMapper} for dates. */ public class DateFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "date"; @@ -73,8 +74,8 @@ public static class Defaults { public static class Builder extends FieldMapper.Builder { private Boolean ignoreMalformed; + private Explicit format = new Explicit<>(DEFAULT_DATE_TIME_FORMATTER.pattern(), false); private Locale locale; - private boolean dateTimeFormatterSet = false; public Builder(String name) { super(name, new DateFieldType(), new DateFieldType()); @@ -102,27 +103,37 @@ protected Explicit ignoreMalformed(BuilderContext context) { return Defaults.IGNORE_MALFORMED; } - /** Whether an explicit format for this date field has been set already. 
*/ - public boolean isDateTimeFormatterSet() { - return dateTimeFormatterSet; + public Builder locale(Locale locale) { + this.locale = locale; + return this; + } + + public Locale locale() { + return locale; } - public Builder dateTimeFormatter(DateFormatter dateTimeFormatter) { - fieldType().setDateTimeFormatter(dateTimeFormatter); - dateTimeFormatterSet = true; + public String format() { + return format.value(); + } + + public Builder format(String format) { + this.format = new Explicit<>(format, true); return this; } - public void locale(Locale locale) { - this.locale = locale; + public boolean isFormatterSet() { + return format.explicit(); } @Override protected void setupFieldType(BuilderContext context) { super.setupFieldType(context); + String pattern = this.format.value(); DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter; - if (!locale.equals(dateTimeFormatter.locale())) { - fieldType().setDateTimeFormatter(dateTimeFormatter.withLocale(locale)); + + boolean hasPatternChanged = Strings.hasLength(pattern) && Objects.equals(pattern, dateTimeFormatter.pattern()) == false; + if (hasPatternChanged || Objects.equals(builder.locale, dateTimeFormatter.locale()) == false) { + fieldType().setDateTimeFormatter(DateFormatter.forPattern(pattern).withLocale(locale)); } } @@ -160,7 +171,7 @@ public Mapper.Builder parse(String name, Map node, ParserCo builder.locale(LocaleUtils.parse(propNode.toString())); iterator.remove(); } else if (propName.equals("format")) { - builder.dateTimeFormatter(parseDateTimeFormatter(propNode)); + builder.format(propNode.toString()); iterator.remove(); } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); @@ -196,13 +207,12 @@ public MappedFieldType clone() { public boolean equals(Object o) { if (!super.equals(o)) return false; DateFieldType that = (DateFieldType) o; - return Objects.equals(dateTimeFormatter.pattern(), that.dateTimeFormatter.pattern()) && - Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()); + return Objects.equals(dateTimeFormatter, that.dateTimeFormatter); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), dateTimeFormatter.pattern(), dateTimeFormatter.locale()); + return Objects.hash(super.hashCode(), dateTimeFormatter); } @Override @@ -214,10 +224,10 @@ public String typeName() { public void checkCompatibility(MappedFieldType fieldType, List conflicts) { super.checkCompatibility(fieldType, conflicts); DateFieldType other = (DateFieldType) fieldType; - if (Objects.equals(dateTimeFormatter().pattern(), other.dateTimeFormatter().pattern()) == false) { + if (Objects.equals(dateTimeFormatter.pattern(), other.dateTimeFormatter.pattern()) == false) { conflicts.add("mapper [" + name() + "] has different [format] values"); } - if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) { + if (Objects.equals(dateTimeFormatter.locale(), other.dateTimeFormatter.locale()) == false) { conflicts.add("mapper [" + name() + "] has different [locale] values"); } } @@ -226,9 +236,9 @@ public DateFormatter dateTimeFormatter() { return dateTimeFormatter; } - public void setDateTimeFormatter(DateFormatter dateTimeFormatter) { + void setDateTimeFormatter(DateFormatter formatter) { checkIfFrozen(); - this.dateTimeFormatter = dateTimeFormatter; + this.dateTimeFormatter = formatter; this.dateMathParser = dateTimeFormatter.toDateMathParser(); } @@ -237,7 +247,7 @@ protected DateMathParser dateMathParser() { } long 
parse(String value) { - return dateTimeFormatter().parseMillis(value); + return DateFormatters.toZonedDateTime(dateTimeFormatter().parse(value)).toInstant().toEpochMilli(); } @Override @@ -260,7 +270,7 @@ public Query termQuery(Object value, @Nullable QueryShardContext context) { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, ShapeRelation relation, - @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) { + @Nullable ZoneId timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) { failIfNotIndexed(); if (relation == ShapeRelation.DISJOINT) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + @@ -294,8 +304,8 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower return query; } - public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTimeZone zone, - @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { + public long parseToMilliseconds(Object value, boolean roundUp, + @Nullable ZoneId zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { DateMathParser dateParser = dateMathParser(); if (forcedDateParser != null) { dateParser = forcedDateParser; @@ -307,13 +317,13 @@ public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTim } else { strValue = value.toString(); } - return dateParser.parse(strValue, context::nowInMillis, roundUp, DateUtils.dateTimeZoneToZoneId(zone)); + return dateParser.parse(strValue, context::nowInMillis, roundUp, zone).toEpochMilli(); } @Override - public Relation isFieldWithinQuery(IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper, - DateTimeZone timeZone, DateMathParser dateParser, - QueryRewriteContext context) throws IOException { + public Relation isFieldWithinQuery(IndexReader reader, + Object from, Object to, boolean includeLower, boolean includeUpper, + ZoneId timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException { if (dateParser == null) { dateParser = this.dateMathParser; } @@ -376,13 +386,13 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { DateFormatter dateTimeFormatter = this.dateTimeFormatter; if (format != null) { - dateTimeFormatter = DateFormatter.forPattern(format); + dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale()); } if (timeZone == null) { - timeZone = DateTimeZone.UTC; + timeZone = ZoneOffset.UTC; } return new DocValueFormat.DateTime(dateTimeFormatter, timeZone); } @@ -442,7 +452,7 @@ protected void parseCreateField(ParseContext context, List field long timestamp; try { timestamp = fieldType().parse(dateAsString); - } catch (IllegalArgumentException e) { + } catch (IllegalArgumentException | ElasticsearchParseException e) { if (ignoreMalformed.value()) { context.addIgnoredField(fieldType.name()); return; @@ -489,8 +499,9 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, || fieldType().dateTimeFormatter().pattern().equals(DEFAULT_DATE_TIME_FORMATTER.pattern()) == false) { builder.field("format", fieldType().dateTimeFormatter().pattern()); } + if (includeDefaults - || fieldType().dateTimeFormatter().locale() != Locale.ROOT) { + || 
fieldType().dateTimeFormatter().locale().equals(DEFAULT_DATE_TIME_FORMATTER.locale()) == false) { builder.field("locale", fieldType().dateTimeFormatter().locale()); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 54e59691f80d5..fe2bd6e9eed59 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -21,6 +21,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; @@ -35,6 +36,7 @@ import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import java.io.IOException; +import java.time.format.DateTimeParseException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -672,7 +674,7 @@ private static Mapper.Builder createBuilderFromFieldType(final ParseContext private static Mapper.Builder newDateBuilder(String name, DateFormatter dateTimeFormatter, Version indexCreated) { DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name); if (dateTimeFormatter != null) { - builder.dateTimeFormatter(dateTimeFormatter); + builder.format(dateTimeFormatter.pattern()).locale(dateTimeFormatter.locale()); } return builder; } @@ -717,8 +719,8 @@ private static Mapper.Builder createBuilderFromDynamicValue(final ParseCont // `epoch_millis` or `YYYY` for (DateFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) { try { - dateTimeFormatter.parseMillis(text); - } catch (IllegalArgumentException e) { + dateTimeFormatter.parse(text); + } catch (ElasticsearchParseException | DateTimeParseException | IllegalArgumentException e) { // failure to parse this, continue continue; } @@ -728,8 +730,8 @@ private static Mapper.Builder createBuilderFromDynamicValue(final ParseCont } if (builder instanceof DateFieldMapper.Builder) { DateFieldMapper.Builder dateBuilder = (DateFieldMapper.Builder) builder; - if (dateBuilder.isDateTimeFormatterSet() == false) { - dateBuilder.dateTimeFormatter(dateTimeFormatter); + if (dateBuilder.isFormatterSet() == false) { + dateBuilder.format(dateTimeFormatter.pattern()).locale(dateTimeFormatter.locale()); } } return builder; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index a8ef46b93060e..2b52e42ffe558 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -44,10 +44,10 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; import java.net.InetAddress; +import java.time.ZoneId; import java.util.Arrays; import java.util.Iterator; import java.util.List; @@ -303,7 +303,7 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { if (format != null) { throw new IllegalArgumentException("Field [" + name() + "] of type 
[" + typeName() + "] does not support custom formats"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index f785e01125f69..5ef689709400d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -50,9 +50,9 @@ import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.List; import java.util.Objects; @@ -335,10 +335,10 @@ public Query termsQuery(List values, @Nullable QueryShardContext context) { * @param relation the relation, nulls should be interpreted like INTERSECTS */ public Query rangeQuery( - Object lowerTerm, Object upperTerm, - boolean includeLower, boolean includeUpper, - ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, - QueryShardContext context) { + Object lowerTerm, Object upperTerm, + boolean includeLower, boolean includeUpper, + ShapeRelation relation, ZoneId timeZone, DateMathParser parser, + QueryShardContext context) { throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries"); } @@ -413,7 +413,7 @@ public Relation isFieldWithinQuery( IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper, - DateTimeZone timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException { + ZoneId timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException { return Relation.INTERSECTS; } @@ -448,7 +448,7 @@ public void setEagerGlobalOrdinals(boolean eagerGlobalOrdinals) { /** Return a {@link DocValueFormat} that can be used to display and parse * values as returned by the fielddata API. * The default implementation returns a {@link DocValueFormat#RAW}. 
*/ - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { if (format != null) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 8d9a688776548..06e12ca8b5e4c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -53,9 +53,9 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; @@ -961,7 +961,7 @@ public Object valueForDisplay(Object value) { } @Override - public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(String format, ZoneId timeZone) { if (timeZone != null) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index d93c909ff8445..e5ba55de7bfd0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -42,6 +42,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.network.InetAddresses; @@ -49,19 +50,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateMathParser; -import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.index.query.QueryShardContext; -import org.joda.time.DateTimeZone; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.time.ZoneId; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; @@ -71,7 +71,6 @@ import java.util.Objects; import java.util.Set; -import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter; import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD; import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD; import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD; @@ -92,12 +91,12 @@ public static class Defaults { public static class Builder extends FieldMapper.Builder { private Boolean coerce; - private Locale locale; + private Locale locale = 
Locale.ROOT; + private String pattern; public Builder(String name, RangeType type) { super(name, new RangeFieldType(type), new RangeFieldType(type)); builder = this; - locale = Locale.ROOT; } @Override @@ -128,8 +127,8 @@ protected Explicit coerce(BuilderContext context) { return Defaults.COERCE; } - public Builder dateTimeFormatter(DateFormatter dateTimeFormatter) { - fieldType().setDateTimeFormatter(dateTimeFormatter); + public Builder format(String format) { + this.pattern = format; return this; } @@ -145,12 +144,15 @@ public void locale(Locale locale) { @Override protected void setupFieldType(BuilderContext context) { super.setupFieldType(context); - DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter; + DateFormatter formatter = fieldType().dateTimeFormatter; if (fieldType().rangeType == RangeType.DATE) { - if (!locale.equals(dateTimeFormatter.locale())) { - fieldType().setDateTimeFormatter(dateTimeFormatter.withLocale(locale)); + boolean hasPatternChanged = Strings.hasLength(builder.pattern) && + Objects.equals(builder.pattern, formatter.pattern()) == false; + + if (hasPatternChanged || Objects.equals(builder.locale, formatter.locale()) == false) { + fieldType().setDateTimeFormatter(DateFormatter.forPattern(pattern).withLocale(locale)); } - } else if (dateTimeFormatter != null) { + } else if (pattern != null) { throw new IllegalArgumentException("field [" + name() + "] of type [" + fieldType().rangeType + "] should not define a dateTimeFormatter unless it is a " + RangeType.DATE + " type"); } @@ -190,7 +192,7 @@ public Mapper.Builder parse(String name, Map node, builder.locale(LocaleUtils.parse(propNode.toString())); iterator.remove(); } else if (propName.equals("format")) { - builder.dateTimeFormatter(parseDateTimeFormatter(propNode)); + builder.format(propNode.toString()); iterator.remove(); } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); @@ -219,8 +221,8 @@ public static final class RangeFieldType extends MappedFieldType { RangeFieldType(RangeFieldType other) { super(other); this.rangeType = other.rangeType; - if (other.dateTimeFormatter() != null) { - setDateTimeFormatter(other.dateTimeFormatter); + if (other.rangeType == RangeType.DATE && other.dateTimeFormatter() != null) { + setDateTimeFormatter(other.dateTimeFormatter()); } } @@ -235,15 +237,13 @@ public boolean equals(Object o) { RangeFieldType that = (RangeFieldType) o; return Objects.equals(rangeType, that.rangeType) && (rangeType == RangeType.DATE) ? - Objects.equals(dateTimeFormatter.pattern(), that.dateTimeFormatter.pattern()) - && Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()) + Objects.equals(dateTimeFormatter, that.dateTimeFormatter) : dateTimeFormatter == null && that.dateTimeFormatter == null; } @Override public int hashCode() { - return (dateTimeFormatter == null) ? 
Objects.hash(super.hashCode(), rangeType) - : Objects.hash(super.hashCode(), rangeType, dateTimeFormatter.pattern(), dateTimeFormatter.locale()); + return Objects.hash(super.hashCode(), rangeType, dateTimeFormatter); } @Override @@ -285,7 +285,7 @@ public Query termQuery(Object value, QueryShardContext context) { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, - ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) { + ShapeRelation relation, ZoneId timeZone, DateMathParser parser, QueryShardContext context) { failIfNotIndexed(); if (parser == null) { parser = dateMathParser(); @@ -543,7 +543,8 @@ public Field getRangeField(String name, Range r) { return new LongRange(name, new long[] {((Number)r.from).longValue()}, new long[] {((Number)r.to).longValue()}); } private Number parse(DateMathParser dateMathParser, String dateStr) { - return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");}); + return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");}) + .toEpochMilli(); } @Override public Number parseFrom(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) @@ -586,18 +587,18 @@ public Query dvRangeQuery(String field, QueryType queryType, Object from, Object @Override public Query rangeQuery(String field, boolean hasDocValues, Object lowerTerm, Object upperTerm, boolean includeLower, - boolean includeUpper, ShapeRelation relation, @Nullable DateTimeZone timeZone, + boolean includeUpper, ShapeRelation relation, @Nullable ZoneId timeZone, @Nullable DateMathParser parser, QueryShardContext context) { - DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone; - ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(zone); + ZoneId zone = (timeZone == null) ? ZoneOffset.UTC : timeZone; + DateMathParser dateMathParser = (parser == null) ? DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser() : parser; Long low = lowerTerm == null ? Long.MIN_VALUE : dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(), - context::nowInMillis, false, zoneId); + context::nowInMillis, false, zone).toEpochMilli(); Long high = upperTerm == null ? Long.MAX_VALUE : dateMathParser.parse(upperTerm instanceof BytesRef ? ((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(), - context::nowInMillis, false, zoneId); + context::nowInMillis, false, zone).toEpochMilli(); return super.rangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation, zone, dateMathParser, context); @@ -910,7 +911,7 @@ public Object parse(Object value, boolean coerce) { return numberType.parse(value, coerce); } public Query rangeQuery(String field, boolean hasDocValues, Object from, Object to, boolean includeFrom, boolean includeTo, - ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser dateMathParser, + ShapeRelation relation, @Nullable ZoneId timeZone, @Nullable DateMathParser dateMathParser, QueryShardContext context) { Object lower = from == null ? minValue() : parse(from, false); Object upper = to == null ? 
maxValue() : parse(to, false); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index f35e6126dbe71..6d2f0fddd86c2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -22,7 +22,6 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.ToXContent; @@ -46,7 +45,7 @@ public static class Defaults { public static final DateFormatter[] DYNAMIC_DATE_TIME_FORMATTERS = new DateFormatter[]{ DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, - Joda.getStrictStandardDateFormatter() + DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis") }; public static final boolean DATE_DETECTION = true; public static final boolean NUMERIC_DETECTION = false; @@ -55,8 +54,7 @@ public static class Defaults { public static class Builder extends ObjectMapper.Builder { protected Explicit dynamicTemplates = new Explicit<>(new DynamicTemplate[0], false); - protected Explicit dynamicDateTimeFormatters = - new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false); + protected Explicit dynamicDateTimeFormatters = new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false); protected Explicit dateDetection = new Explicit<>(Defaults.DATE_DETECTION, false); protected Explicit numericDetection = new Explicit<>(Defaults.NUMERIC_DETECTION, false); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java index 3d3b160787050..366eb3b36f0fe 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java @@ -23,7 +23,8 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.index.query.QueryShardContext; -import org.joda.time.DateTimeZone; + +import java.time.ZoneId; /** * {@link MappedFieldType} base impl for field types that are neither dates nor ranges. @@ -40,7 +41,7 @@ protected SimpleMappedFieldType(MappedFieldType ref) { @Override public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, - ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) { + ShapeRelation relation, ZoneId timeZone, DateMathParser parser, QueryShardContext context) { if (relation == ShapeRelation.DISJOINT) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support DISJOINT ranges"); @@ -52,7 +53,7 @@ public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includ } /** - * Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, DateTimeZone, DateMathParser, QueryShardContext)} + * Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, ZoneId, DateMathParser, QueryShardContext)} * but without the trouble of relations or date-specific options. 
 */ protected Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index d93caf3c4e8d1..8cf66009ea140 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -43,7 +43,6 @@ public class TypeParsers { public static final String INDEX_OPTIONS_POSITIONS = "positions"; public static final String INDEX_OPTIONS_OFFSETS = "offsets"; - private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { NamedAnalyzer indexAnalyzer = null; diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 6ae9055efcefc..363384030a2ac 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -38,9 +38,9 @@ import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.search.QueryParserHelper; import org.elasticsearch.index.search.QueryStringQueryParser; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.List; import java.util.Locale; diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -144,7 +144,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> implements MultiTermQueryBuilder { private static final ParseField RELATION_FIELD = new ParseField("relation"); private final String fieldName; - private Object from; - private Object to; - - private DateTimeZone timeZone; - + private ZoneId timeZone; private boolean includeLower = DEFAULT_INCLUDE_LOWER; - private boolean includeUpper = DEFAULT_INCLUDE_UPPER; - - private DateFormatter format; - + private String format; private ShapeRelation relation; /** @@ -101,11 +95,8 @@ public RangeQueryBuilder(StreamInput in) throws IOException { to = in.readGenericValue(); includeLower = in.readBoolean(); includeUpper = in.readBoolean(); - timeZone = in.readOptionalTimeZone(); - String formatString = in.readOptionalString(); - if (formatString != null) { - format = DateFormatter.forPattern(formatString); - } + timeZone = in.readOptionalZoneId(); + format = in.readOptionalString(); String relationString = in.readOptionalString(); if (relationString != null) { relation = ShapeRelation.getRelationByName(relationString); @@ -129,12 +120,8 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeGenericValue(this.to); out.writeBoolean(this.includeLower); out.writeBoolean(this.includeUpper); - out.writeOptionalTimeZone(timeZone); - String formatString = null; - if (this.format != null) { - formatString = this.format.pattern(); - } - out.writeOptionalString(formatString); + out.writeOptionalZoneId(timeZone); + out.writeOptionalString(format); String relationString = null; if (this.relation != null) { relationString = this.relation.getRelationName(); @@ -267,7 +254,11 @@ public RangeQueryBuilder timeZone(String timeZone) { if (timeZone == null) { throw new IllegalArgumentException("timezone cannot be null"); } - this.timeZone = DateTimeZone.forID(timeZone); + try { + this.timeZone = ZoneId.of(timeZone); + } catch (DateTimeException e) { + throw new IllegalArgumentException(e); + } return this; }
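
The hunk above replaces joda's DateTimeZone.forID with java.time's ZoneId.of. The two differ in the exception they throw for an unknown or malformed id (IllegalArgumentException vs. DateTimeException), which is why the new code rewraps the exception so callers keep seeing the old error contract. A minimal JDK-only sketch of that validation pattern (class and method names are illustrative, not part of the change):

    import java.time.DateTimeException;
    import java.time.ZoneId;

    public class ZoneIdValidation {
        // ZoneId.of accepts region ids ("Europe/Paris") and fixed offsets ("+01:00"),
        // but throws DateTimeException for bad input; rewrapping preserves the
        // IllegalArgumentException that DateTimeZone.forID used to throw.
        static ZoneId parseTimeZone(String timeZone) {
            if (timeZone == null) {
                throw new IllegalArgumentException("timezone cannot be null");
            }
            try {
                return ZoneId.of(timeZone);
            } catch (DateTimeException e) {
                throw new IllegalArgumentException(e);
            }
        }

        public static void main(String[] args) {
            System.out.println(parseTimeZone("Europe/Paris")); // region id
            System.out.println(parseTimeZone("+01:00"));       // fixed offset
            try {
                parseTimeZone("not-a-zone");
            } catch (IllegalArgumentException e) {
                System.out.println("rejected: " + e.getCause().getMessage());
            }
        }
    }
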
@@ -275,10 +266,10 @@ public RangeQueryBuilder timeZone(String timeZone) { * In case of date field, gets the from/to fields timezone adjustment */ public String timeZone() { - return this.timeZone == null ? null : this.timeZone.getID(); + return this.timeZone == null ? null : this.timeZone.getId(); } - DateTimeZone getDateTimeZone() { // for testing + ZoneId getDateTimeZone() { // for testing return timeZone; } @@ -289,7 +280,9 @@ public RangeQueryBuilder format(String format) { if (format == null) { throw new IllegalArgumentException("format cannot be null"); } - this.format = DateFormatter.forPattern(format); + // this just ensures that the pattern is actually valid, no need to keep it here + DateFormatter.forPattern(format); + this.format = format; return this; } @@ -297,12 +290,12 @@ public RangeQueryBuilder format(String format) { * Gets the format field to parse the from/to fields */ public String format() { - return this.format == null ? null : this.format.pattern(); + return format; } DateMathParser getForceDateParser() { // pkg private for testing - if (this.format != null) { - return this.format.toDateMathParser(); + if (Strings.hasText(format)) { + return DateFormatter.forPattern(this.format).toDateMathParser(); } return null; } @@ -334,10 +327,10 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.field(INCLUDE_LOWER_FIELD.getPreferredName(), includeLower); builder.field(INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper); if (timeZone != null) { - builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getID()); + builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getId()); } - if (format != null) { - builder.field(FORMAT_FIELD.getPreferredName(), format.pattern()); + if (Strings.hasText(format)) { + builder.field(FORMAT_FIELD.getPreferredName(), format); } if (relation != null) { builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName()); @@ -531,21 +524,17 @@ protected Query doToQuery(QueryShardContext context) throws IOException { @Override protected int doHashCode() { - String timeZoneId = timeZone == null ? null : timeZone.getID(); - String formatString = format == null ? null : format.pattern(); - return Objects.hash(fieldName, from, to, timeZoneId, includeLower, includeUpper, formatString); + return Objects.hash(fieldName, from, to, timeZone, includeLower, includeUpper, format); } @Override protected boolean doEquals(RangeQueryBuilder other) { - String timeZoneId = timeZone == null ? null : timeZone.getID(); - String formatString = format == null ?
null : format.pattern(); return Objects.equals(fieldName, other.fieldName) && Objects.equals(from, other.from) && Objects.equals(to, other.to) && - Objects.equals(timeZoneId, other.timeZone()) && + Objects.equals(timeZone, other.timeZone) && Objects.equals(includeLower, other.includeLower) && Objects.equals(includeUpper, other.includeUpper) && - Objects.equals(formatString, other.format()); + Objects.equals(format, other.format); } } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 4974ef9277e9a..dc5354c7e0522 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -54,9 +54,9 @@ import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -89,7 +89,7 @@ public class QueryStringQueryParser extends XQueryParser { private Analyzer forceQuoteAnalyzer; private String quoteFieldSuffix; private boolean analyzeWildcard; - private DateTimeZone timeZone; + private ZoneId timeZone; private Fuzziness fuzziness = Fuzziness.AUTO; private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions; private MappedFieldType currentFieldType; @@ -227,7 +227,7 @@ public void setAnalyzeWildcard(boolean analyzeWildcard) { /** * @param timeZone Time Zone to be applied to any range query related to dates. */ - public void setTimeZone(DateTimeZone timeZone) { + public void setTimeZone(ZoneId timeZone) { this.timeZone = timeZone; } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 719558edbf748..90ebc8e074108 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -19,9 +19,6 @@ package org.elasticsearch.ingest; -import java.util.Collections; -import java.util.IdentityHashMap; -import java.util.Set; import org.elasticsearch.common.Strings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.IdFieldMapper; @@ -37,12 +34,15 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.Collections; import java.util.Date; import java.util.EnumMap; import java.util.HashMap; +import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; /** * Represents a single document being captured before indexing and holds the source and metadata (like id, type and index). 
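
Because ZoneId and a plain pattern String both carry value semantics, the doHashCode/doEquals hunks above can hash and compare the fields directly instead of first unwrapping them into id and pattern strings. One subtlety to keep in mind: ZoneId equality is id-based, not offset-based, so two zones that resolve to the same offset are not necessarily equal. A small JDK-only demonstration (class name is illustrative):

    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.util.Objects;

    public class ZoneIdEqualityDemo {
        public static void main(String[] args) {
            ZoneId parisA = ZoneId.of("Europe/Paris");
            ZoneId parisB = ZoneId.of("Europe/Paris");
            // Value semantics: direct field comparison is safe in equals/hashCode.
            System.out.println(Objects.equals(parisA, parisB));           // true
            System.out.println(parisA.hashCode() == parisB.hashCode());   // true

            // Caveat: the region id "UTC" and the fixed offset ZoneOffset.UTC
            // are distinct ZoneIds even though they describe the same offset.
            System.out.println(ZoneId.of("UTC").equals(ZoneOffset.UTC));  // false
            System.out.println(ZoneId.of("Z").equals(ZoneOffset.UTC));    // true
        }
    }
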
diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java index 5b55c00875d47..7bf26fd5e57a4 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java @@ -22,7 +22,6 @@ import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.TimeValue; import java.lang.management.ManagementFactory; @@ -43,7 +42,7 @@ public class HotThreads { private static final Object mutex = new Object(); - private static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime"); + private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("dateOptionalTime"); private int busiestThreads = 3; private TimeValue interval = new TimeValue(500, TimeUnit.MILLISECONDS); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 676f2bbdc7b2e..bb449d584b2c8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -39,7 +39,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.rest.RestController; @@ -61,6 +61,7 @@ public class RestIndicesAction extends AbstractCatAction { + private static final DateFormatter STRICT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_time"); private final IndexNameExpressionResolver indexNameExpressionResolver; public RestIndicesAction(Settings settings, RestController controller, IndexNameExpressionResolver indexNameExpressionResolver) { @@ -432,7 +433,7 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res table.addCell(indexMetaData.getCreationDate()); ZonedDateTime creationTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC); - table.addCell(DateFormatters.forPattern("strict_date_time").format(creationTime)); + table.addCell(STRICT_DATE_TIME_FORMATTER.format(creationTime)); table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size()); table.addCell(primaryStats.getStore() == null ? 
null : primaryStats.getStore().size()); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java index fb302b1b3b3a4..22258ce2d8878 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -99,7 +98,7 @@ protected Table getTableWithHeader(RestRequest request) { .endHeaders(); } - private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); + private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); private Table buildTable(RestRequest req, GetSnapshotsResponse getSnapshotsResponse) { Table table = getTableWithHeader(req); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java index 39b3f08dcdc5f..573eac6c04941 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -125,7 +124,7 @@ protected Table getTableWithHeader(final RestRequest request) { return table; } - private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); + private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC); private void buildRow(Table table, boolean fullId, boolean detailed, DiscoveryNodes discoveryNodes, TaskInfo taskInfo) { table.startRow(); diff --git a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java index 546deb3a24b68..fc3816cad8a15 100644 --- a/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java +++ b/server/src/main/java/org/elasticsearch/script/JodaCompatibleZonedDateTime.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.time.DateUtils; import org.joda.time.DateTime; @@ -50,7 +49,7 @@ * A wrapper around ZonedDateTime that exposes joda methods for backcompat. 
*/ public class JodaCompatibleZonedDateTime { - private static final DateFormatter DATE_FORMATTER = DateFormatters.forPattern("strict_date_time"); + private static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("strict_date_time"); private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(JodaCompatibleZonedDateTime.class)); diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java index a911a4f197d67..753ef1fb23d85 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScriptUtils.java @@ -212,7 +212,7 @@ public static final class DecayDateLinear { double scaling; public DecayDateLinear(String originStr, String scaleStr, String offsetStr, double decay) { - this.origin = dateParser.parse(originStr, null, false, defaultZoneId); + this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli(); long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale") .getMillis(); this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset") @@ -235,7 +235,7 @@ public static final class DecayDateExp { double scaling; public DecayDateExp(String originStr, String scaleStr, String offsetStr, double decay) { - this.origin = dateParser.parse(originStr, null, false, defaultZoneId); + this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli(); long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale") .getMillis(); this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset") @@ -258,7 +258,7 @@ public static final class DecayDateGauss { double scaling; public DecayDateGauss(String originStr, String scaleStr, String offsetStr, double decay) { - this.origin = dateParser.parse(originStr, null, false, defaultZoneId); + this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli(); long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale") .getMillis(); this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset") diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 900e1d7fd09ca..ceefe035d4613 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -21,6 +21,7 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; @@ -30,7 +31,6 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.time.DateUtils; -import org.joda.time.DateTimeZone; import java.io.IOException; import java.net.InetAddress; @@ -38,6 +38,7 @@ import java.text.DecimalFormatSymbols; import java.text.NumberFormat; import java.text.ParseException; +import java.time.Instant; import java.time.ZoneId; 
import java.util.Arrays; import java.util.Base64; @@ -164,20 +165,24 @@ final class DateTime implements DocValueFormat { public static final String NAME = "date_time"; final DateFormatter formatter; - // TODO: change this to ZoneId, but will require careful change to serialization - final DateTimeZone timeZone; - private final ZoneId zoneId; + final ZoneId timeZone; private final DateMathParser parser; - public DateTime(DateFormatter formatter, DateTimeZone timeZone) { - this.formatter = Objects.requireNonNull(formatter); + public DateTime(DateFormatter formatter, ZoneId timeZone) { + this.formatter = formatter; this.timeZone = Objects.requireNonNull(timeZone); - this.zoneId = DateUtils.dateTimeZoneToZoneId(timeZone); this.parser = formatter.toDateMathParser(); } public DateTime(StreamInput in) throws IOException { - this(DateFormatter.forPattern(in.readString()), DateTimeZone.forID(in.readString())); + this.formatter = DateFormatter.forPattern(in.readString()); + this.parser = formatter.toDateMathParser(); + String zoneId = in.readString(); + if (in.getVersion().before(Version.V_7_0_0)) { + this.timeZone = DateUtils.of(zoneId); + } else { + this.timeZone = ZoneId.of(zoneId); + } } @Override @@ -188,12 +193,16 @@ public String getWriteableName() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(formatter.pattern()); - out.writeString(timeZone.getID()); + if (out.getVersion().before(Version.V_7_0_0)) { + out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID()); + } else { + out.writeString(timeZone.getId()); + } } @Override public String format(long value) { - return formatter.withZone(zoneId).formatMillis(value); + return formatter.format(Instant.ofEpochMilli(value).atZone(timeZone)); } @Override @@ -203,7 +212,7 @@ public String format(double value) { @Override public long parseLong(String value, boolean roundUp, LongSupplier now) { - return parser.parse(value, now, roundUp, DateUtils.dateTimeZoneToZoneId(timeZone)); + return parser.parse(value, now, roundUp, timeZone).toEpochMilli(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index 28970ec828af9..53a7832884c76 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ -19,11 +19,12 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.rounding.DateTimeUnit; -import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -37,9 +38,10 @@ import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.internal.SearchContext; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; +import 
java.time.ZoneOffset; import java.util.Objects; import static org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder.DATE_FIELD_UNITS; @@ -70,9 +72,9 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild }, Histogram.INTERVAL_FIELD, ObjectParser.ValueType.LONG); PARSER.declareField(DateHistogramValuesSourceBuilder::timeZone, p -> { if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return DateTimeZone.forID(p.text()); + return ZoneId.of(p.text()); } else { - return DateTimeZone.forOffsetHours(p.intValue()); + return ZoneOffset.ofHours(p.intValue()); } }, new ParseField("time_zone"), ObjectParser.ValueType.LONG); CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER, ValueType.NUMERIC); @@ -82,7 +84,7 @@ static DateHistogramValuesSourceBuilder parse(String name, XContentParser parser } private long interval = 0; - private DateTimeZone timeZone = null; + private ZoneId timeZone = null; private DateHistogramInterval dateHistogramInterval; public DateHistogramValuesSourceBuilder(String name) { @@ -93,8 +95,10 @@ protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException { super(in); this.interval = in.readLong(); this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new); - if (in.readBoolean()) { - timeZone = DateTimeZone.forID(in.readString()); + if (in.getVersion().before(Version.V_7_0_0)) { + this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone()); + } else { + this.timeZone = in.readOptionalZoneId(); } } @@ -102,10 +106,10 @@ protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException { protected void innerWriteTo(StreamOutput out) throws IOException { out.writeLong(interval); out.writeOptionalWriteable(dateHistogramInterval); - boolean hasTimeZone = timeZone != null; - out.writeBoolean(hasTimeZone); - if (hasTimeZone) { - out.writeString(timeZone.getID()); + if (out.getVersion().before(Version.V_7_0_0)) { + out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone)); + } else { + out.writeOptionalZoneId(timeZone); } } @@ -176,7 +180,7 @@ public DateHistogramValuesSourceBuilder dateHistogramInterval(DateHistogramInter /** * Sets the time zone to use for this aggregation */ - public DateHistogramValuesSourceBuilder timeZone(DateTimeZone timeZone) { + public DateHistogramValuesSourceBuilder timeZone(ZoneId timeZone) { if (timeZone == null) { throw new IllegalArgumentException("[timeZone] must not be null: [" + name + "]"); } @@ -187,14 +191,14 @@ public DateHistogramValuesSourceBuilder timeZone(DateTimeZone timeZone) { /** * Gets the time zone to use for this aggregation */ - public DateTimeZone timeZone() { + public ZoneId timeZone() { return timeZone; } private Rounding createRounding() { Rounding.Builder tzRoundingBuilder; if (dateHistogramInterval != null) { - DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString()); + Rounding.DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString()); if (dateTimeUnit != null) { tzRoundingBuilder = Rounding.builder(dateTimeUnit); } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java index 635690c44f49e..9ee142fcd2fd5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/RoundingValuesSource.java @@ -21,7 +21,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; -import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.Rounding; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.search.aggregations.support.ValuesSource; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java index 87ba80af9a4b0..794ce066ed76e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java @@ -20,11 +20,10 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.rounding.DateTimeUnit; -import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -42,9 +41,9 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.internal.SearchContext; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.Arrays; import java.util.Map; import java.util.Objects; @@ -70,19 +69,19 @@ public class AutoDateHistogramAggregationBuilder * The current implementation probably should not be invoked in a tight loop. 
* @return Array of RoundingInfo */ - static RoundingInfo[] buildRoundings(DateTimeZone timeZone) { + static RoundingInfo[] buildRoundings(ZoneId timeZone) { RoundingInfo[] roundings = new RoundingInfo[6]; - roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone), - 1000L, "s" , 1, 5, 10, 30); - roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone), + roundings[0] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone), + 1000L, "s", 1, 5, 10, 30); + roundings[1] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone), 60 * 1000L, "m", 1, 5, 10, 30); - roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone), - 60 * 60 * 1000L, "h", 1, 3, 12); - roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone), + roundings[2] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone), + 60 * 60 * 1000L, "h",1, 3, 12); + roundings[3] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.DAY_OF_MONTH, timeZone), 24 * 60 * 60 * 1000L, "d", 1, 7); - roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone), + roundings[4] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MONTH_OF_YEAR, timeZone), 30 * 24 * 60 * 60 * 1000L, "M", 1, 3); - roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone), + roundings[5] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.YEAR_OF_CENTURY, timeZone), 365 * 24 * 60 * 60 * 1000L, "y", 1, 5, 10, 20, 50, 100); return roundings; } @@ -156,7 +155,7 @@ public int getNumBuckets() { return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData); } - static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) { + static Rounding createRounding(Rounding.DateTimeUnit interval, ZoneId timeZone) { Rounding.Builder tzRoundingBuilder = Rounding.builder(interval); if (timeZone != null) { tzRoundingBuilder.timeZone(timeZone); @@ -196,7 +195,7 @@ public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, String } public RoundingInfo(StreamInput in) throws IOException { - rounding = Rounding.Streams.read(in); + rounding = Rounding.read(in); roughEstimateDurationMillis = in.readVLong(); innerIntervals = in.readIntArray(); unitAbbreviation = in.readString(); @@ -204,7 +203,7 @@ public RoundingInfo(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - Rounding.Streams.write(rounding, out); + rounding.writeTo(out); out.writeVLong(roughEstimateDurationMillis); out.writeIntArray(innerIntervals); out.writeString(unitAbbreviation); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index 81bb70bd9672a..1b982ea9deca2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -23,8 +23,8 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.lease.Releasables; -import 
org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index 5199313e0aca1..6d7852a864453 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -23,10 +23,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.rounding.DateTimeUnit; -import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; @@ -54,10 +53,12 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.internal.SearchContext; -import org.joda.time.DateTimeField; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.zone.ZoneOffsetTransition; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -70,29 +71,30 @@ */ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder implements MultiBucketAggregationBuilder { + public static final String NAME = "date_histogram"; private static DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser(); - public static final Map DATE_FIELD_UNITS; + public static final Map DATE_FIELD_UNITS; static { - Map dateFieldUnits = new HashMap<>(); - dateFieldUnits.put("year", DateTimeUnit.YEAR_OF_CENTURY); - dateFieldUnits.put("1y", DateTimeUnit.YEAR_OF_CENTURY); - dateFieldUnits.put("quarter", DateTimeUnit.QUARTER); - dateFieldUnits.put("1q", DateTimeUnit.QUARTER); - dateFieldUnits.put("month", DateTimeUnit.MONTH_OF_YEAR); - dateFieldUnits.put("1M", DateTimeUnit.MONTH_OF_YEAR); - dateFieldUnits.put("week", DateTimeUnit.WEEK_OF_WEEKYEAR); - dateFieldUnits.put("1w", DateTimeUnit.WEEK_OF_WEEKYEAR); - dateFieldUnits.put("day", DateTimeUnit.DAY_OF_MONTH); - dateFieldUnits.put("1d", DateTimeUnit.DAY_OF_MONTH); - dateFieldUnits.put("hour", DateTimeUnit.HOUR_OF_DAY); - dateFieldUnits.put("1h", DateTimeUnit.HOUR_OF_DAY); - dateFieldUnits.put("minute", DateTimeUnit.MINUTES_OF_HOUR); - dateFieldUnits.put("1m", DateTimeUnit.MINUTES_OF_HOUR); - dateFieldUnits.put("second", DateTimeUnit.SECOND_OF_MINUTE); - dateFieldUnits.put("1s", DateTimeUnit.SECOND_OF_MINUTE); + Map dateFieldUnits = new HashMap<>(); + dateFieldUnits.put("year", Rounding.DateTimeUnit.YEAR_OF_CENTURY); + dateFieldUnits.put("1y", Rounding.DateTimeUnit.YEAR_OF_CENTURY); + dateFieldUnits.put("quarter", Rounding.DateTimeUnit.QUARTER_OF_YEAR); + dateFieldUnits.put("1q", Rounding.DateTimeUnit.QUARTER_OF_YEAR); + dateFieldUnits.put("month", 
Rounding.DateTimeUnit.MONTH_OF_YEAR); + dateFieldUnits.put("1M", Rounding.DateTimeUnit.MONTH_OF_YEAR); + dateFieldUnits.put("week", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR); + dateFieldUnits.put("1w", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR); + dateFieldUnits.put("day", Rounding.DateTimeUnit.DAY_OF_MONTH); + dateFieldUnits.put("1d", Rounding.DateTimeUnit.DAY_OF_MONTH); + dateFieldUnits.put("hour", Rounding.DateTimeUnit.HOUR_OF_DAY); + dateFieldUnits.put("1h", Rounding.DateTimeUnit.HOUR_OF_DAY); + dateFieldUnits.put("minute", Rounding.DateTimeUnit.MINUTES_OF_HOUR); + dateFieldUnits.put("1m", Rounding.DateTimeUnit.MINUTES_OF_HOUR); + dateFieldUnits.put("second", Rounding.DateTimeUnit.SECOND_OF_MINUTE); + dateFieldUnits.put("1s", Rounding.DateTimeUnit.SECOND_OF_MINUTE); DATE_FIELD_UNITS = unmodifiableMap(dateFieldUnits); } @@ -369,11 +371,11 @@ public String getType() { * coordinating node in order to generate missing buckets, which may cross a transition * even though data on the shards doesn't. */ - DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException { - final DateTimeZone tz = timeZone(); + ZoneId rewriteTimeZone(QueryShardContext context) throws IOException { + final ZoneId tz = timeZone(); if (field() != null && tz != null && - tz.isFixed() == false && + tz.getRules().isFixedOffset() == false && field() != null && script() == null) { final MappedFieldType ft = context.fieldMapper(field()); @@ -391,16 +393,29 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException { } if (anyInstant != null) { - final long prevTransition = tz.previousTransition(anyInstant); - final long nextTransition = tz.nextTransition(anyInstant); + Instant instant = Instant.ofEpochMilli(anyInstant); + ZoneOffsetTransition prevOffsetTransition = tz.getRules().previousTransition(instant); + final long prevTransition; + if (prevOffsetTransition != null) { + prevTransition = prevOffsetTransition.getInstant().toEpochMilli(); + } else { + prevTransition = instant.toEpochMilli(); + } + ZoneOffsetTransition nextOffsetTransition = tz.getRules().nextTransition(instant); + final long nextTransition; + if (nextOffsetTransition != null) { + nextTransition = nextOffsetTransition.getInstant().toEpochMilli(); + } else { + nextTransition = instant.toEpochMilli(); + } // We need all not only values but also rounded values to be within // [prevTransition, nextTransition]. final long low; - DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit(); + Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit(); if (intervalAsUnit != null) { - final DateTimeField dateTimeField = intervalAsUnit.field(tz); - low = dateTimeField.roundCeiling(prevTransition); + Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build(); + low = rounding.nextRoundingValue(prevTransition); } else { final TimeValue intervalAsMillis = getIntervalAsTimeValue(); low = Math.addExact(prevTransition, intervalAsMillis.millis()); @@ -408,12 +423,12 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException { // rounding rounds down, so 'nextTransition' is a good upper bound final long high = nextTransition; - if (ft.isFieldWithinQuery(reader, low, high, true, false, DateTimeZone.UTC, EPOCH_MILLIS_PARSER, + if (ft.isFieldWithinQuery(reader, low, high, true, false, ZoneOffset.UTC, EPOCH_MILLIS_PARSER, context) == Relation.WITHIN) { // All values in this reader have the same offset despite daylight saving times. 
// This is very common for location-based timezones such as Europe/Paris in // combination with time-based indices. - return DateTimeZone.forOffsetMillis(tz.getOffset(anyInstant)); + return ZoneOffset.ofTotalSeconds(tz.getRules().getOffset(instant).getTotalSeconds()); } } } @@ -424,9 +439,9 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException { @Override protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config, AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { - final DateTimeZone tz = timeZone(); + final ZoneId tz = timeZone(); final Rounding rounding = createRounding(tz); - final DateTimeZone rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext()); + final ZoneId rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext()); final Rounding shardRounding; if (tz == rewrittenTimeZone) { shardRounding = rounding; @@ -447,7 +462,7 @@ DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException { * {@code null} then it means that the interval is expressed as a fixed * {@link TimeValue} and may be accessed via * {@link #getIntervalAsTimeValue()}. */ - private DateTimeUnit getIntervalAsDateTimeUnit() { + private Rounding.DateTimeUnit getIntervalAsDateTimeUnit() { if (dateHistogramInterval != null) { return DATE_FIELD_UNITS.get(dateHistogramInterval.toString()); } @@ -466,9 +481,9 @@ private TimeValue getIntervalAsTimeValue() { } } - private Rounding createRounding(DateTimeZone timeZone) { + private Rounding createRounding(ZoneId timeZone) { Rounding.Builder tzRoundingBuilder; - DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit(); + Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit(); if (intervalAsUnit != null) { tzRoundingBuilder = Rounding.builder(intervalAsUnit); } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 735a6717210a5..0c7a91505ae88 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -23,8 +23,8 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java index c7ad6de7e0d72..8c025eb34eeb3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorFactory.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.histogram; -import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.Rounding; import org.elasticsearch.search.aggregations.Aggregator; import 
org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
index 4cecfeff83381..b0dfbb9d66e9d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java
@@ -21,10 +21,10 @@
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Rounding;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
index f2e450942c3ad..63d08f5e832ac 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.search.aggregations.bucket.histogram;
 import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.common.Rounding;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregations;
@@ -32,10 +32,10 @@
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
 import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -108,7 +108,7 @@ public String getKeyAsString() {
         @Override
         public Object getKey() {
-            return new DateTime(key, DateTimeZone.UTC);
+            return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
         }

         @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
index 496f8efc60ccf..2fa7f15a703ec 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java
@@ -20,9 +20,9 @@
 import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.common.Rounding;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregations;
@@ -34,10 +34,10 @@
 import org.elasticsearch.search.aggregations.KeyComparable;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
@@ -112,7 +112,7 @@ public String getKeyAsString() {
         @Override
         public Object getKey() {
-            return new DateTime(key, DateTimeZone.UTC);
+            return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
         }

         @Override
@@ -185,13 +185,13 @@ static class EmptyBucketInfo {
         }

         EmptyBucketInfo(StreamInput in) throws IOException {
-            rounding = Rounding.Streams.read(in);
+            rounding = Rounding.read(in);
             subAggregations = InternalAggregations.readAggregations(in);
             bounds = in.readOptionalWriteable(ExtendedBounds::new);
         }

         void writeTo(StreamOutput out) throws IOException {
-            Rounding.Streams.write(rounding, out);
+            rounding.writeTo(out);
             subAggregations.writeTo(out);
             out.writeOptionalWriteable(bounds);
         }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
index c9ff1389f8ad3..66a29b4e05073 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
@@ -24,10 +24,10 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
 import java.util.List;

 public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation implements Histogram {
@@ -83,7 +83,7 @@ public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBuck
         @Override
         public Object getKey() {
             if (key != null) {
-                return new DateTime(key, DateTimeZone.UTC);
+                return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
             }
             return null;
         }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
index ace0cb59907a8..1cf43a53ed26c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedDateHistogram.java
@@ -23,10 +23,10 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
 import java.util.List;

 public class ParsedDateHistogram extends ParsedMultiBucketAggregation implements Histogram {
@@ -62,7 +62,7 @@ public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBuck
         @Override
         public Object getKey() {
             if (key != null) {
-                return new DateTime(key, DateTimeZone.UTC);
+                return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
             }
             return null;
         }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
index b5bdba85b78ef..2b5e92ddcb3f9 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/DateRangeAggregationBuilder.java
@@ -30,9 +30,9 @@
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
 import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTime;
 import java.io.IOException;
+import java.time.ZonedDateTime;
 import java.util.Map;

 public class DateRangeAggregationBuilder extends AbstractRangeBuilder {
@@ -224,24 +224,24 @@ public DateRangeAggregationBuilder addUnboundedFrom(double from) {
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregationBuilder addRange(String key, DateTime from, DateTime to) {
+    public DateRangeAggregationBuilder addRange(String key, ZonedDateTime from, ZonedDateTime to) {
         addRange(new RangeAggregator.Range(key, convertDateTime(from), convertDateTime(to)));
         return this;
     }

-    private static Double convertDateTime(DateTime dateTime) {
+    private static Double convertDateTime(ZonedDateTime dateTime) {
         if (dateTime == null) {
             return null;
         } else {
-            return (double) dateTime.getMillis();
+            return (double) dateTime.toInstant().toEpochMilli();
         }
     }

     /**
-     * Same as {@link #addRange(String, DateTime, DateTime)} but the key will be
+     * Same as {@link #addRange(String, ZonedDateTime, ZonedDateTime)} but the key will be
      * automatically generated based on from and to.
      */
-    public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
+    public DateRangeAggregationBuilder addRange(ZonedDateTime from, ZonedDateTime to) {
         return addRange(null, from, to);
     }

@@ -253,16 +253,16 @@ public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregationBuilder addUnboundedTo(String key, DateTime to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String key, ZonedDateTime to) {
         addRange(new RangeAggregator.Range(key, null, convertDateTime(to)));
         return this;
     }

     /**
-     * Same as {@link #addUnboundedTo(String, DateTime)} but the key will be
+     * Same as {@link #addUnboundedTo(String, ZonedDateTime)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
+    public DateRangeAggregationBuilder addUnboundedTo(ZonedDateTime to) {
         return addUnboundedTo(null, to);
     }

@@ -274,16 +274,16 @@ public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public DateRangeAggregationBuilder addUnboundedFrom(String key, DateTime from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String key, ZonedDateTime from) {
         addRange(new RangeAggregator.Range(key, convertDateTime(from), null));
         return this;
     }

     /**
-     * Same as {@link #addUnboundedFrom(String, DateTime)} but the key will be
+     * Same as {@link #addUnboundedFrom(String, ZonedDateTime)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregationBuilder addUnboundedFrom(DateTime from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(ZonedDateTime from) {
         return addUnboundedFrom(null, from);
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
index 408c1325b85c9..a354aaeadbac0 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java
@@ -24,10 +24,10 @@
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValueType;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
 import java.util.List;
 import java.util.Map;
@@ -48,12 +48,14 @@ public Bucket(String key, double from, double to, long docCount, InternalAggrega
         @Override
         public Object getFrom() {
-            return Double.isInfinite(((Number) from).doubleValue()) ? null : new DateTime(((Number) from).longValue(), DateTimeZone.UTC);
+            return Double.isInfinite(((Number) from).doubleValue()) ? null :
+                Instant.ofEpochMilli(((Number) from).longValue()).atZone(ZoneOffset.UTC);
         }

         @Override
         public Object getTo() {
-            return Double.isInfinite(((Number) to).doubleValue()) ? null : new DateTime(((Number) to).longValue(), DateTimeZone.UTC);
+            return Double.isInfinite(((Number) to).doubleValue()) ? null :
+                Instant.ofEpochMilli(((Number) to).longValue()).atZone(ZoneOffset.UTC);
         }

         private Double internalGetFrom() {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
index 68adc41d23765..d4504e245541b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ParsedDateRange.java
@@ -21,10 +21,11 @@
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;

 public class ParsedDateRange extends ParsedRange {
@@ -59,11 +60,11 @@ public Object getTo() {
         return doubleAsDateTime(to);
     }

-    private static DateTime doubleAsDateTime(Double d) {
+    private static ZonedDateTime doubleAsDateTime(Double d) {
         if (d == null || Double.isInfinite(d)) {
             return null;
         }
-        return new DateTime(d.longValue(), DateTimeZone.UTC);
+        return Instant.ofEpochMilli(d.longValue()).atZone(ZoneOffset.UTC);
     }

     static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
index b8785d0bf7045..68ec9085df52a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
@@ -21,9 +21,9 @@
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Rounding;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.rounding.DateTimeUnit;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -34,7 +34,6 @@
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -139,9 +138,9 @@ protected PipelineAggregator createInternal(Map metaData) throws
         }
         Long xAxisUnits = null;
         if (units != null) {
-            DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(units);
+            Rounding.DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(units);
             if (dateTimeUnit != null) {
-                xAxisUnits = dateTimeUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
+                xAxisUnits = dateTimeUnit.getField().getBaseUnit().getDuration().toMillis();
             } else {
                 TimeValue timeValue = TimeValue.parseTimeValue(units, null, getClass().getSimpleName() + ".unit");
                 if (timeValue != null) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
index fbc3081758f96..de112c427a751 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java
@@ -19,19 +19,22 @@
 package org.elasticsearch.search.aggregations.support;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.time.DateUtils;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.script.Script;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
 import java.util.Objects;
 import java.util.function.BiFunction;
@@ -39,7 +42,7 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
     private String fieldName;
     private Object missing;
     private Script script;
-    private DateTimeZone timeZone;
+    private ZoneId timeZone;

     private static final String NAME = "field_config";
@@ -62,16 +65,16 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
         if (timezoneAware) {
             parser.declareField(MultiValuesSourceFieldConfig.Builder::setTimeZone, p -> {
                 if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
-                    return DateTimeZone.forID(p.text());
+                    return ZoneId.of(p.text());
                 } else {
-                    return DateTimeZone.forOffsetHours(p.intValue());
+                    return ZoneOffset.ofHours(p.intValue());
                 }
             }, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
         }
         return parser;
     };

-    private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, DateTimeZone timeZone) {
+    private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, ZoneId timeZone) {
         this.fieldName = fieldName;
         this.missing = missing;
         this.script = script;
@@ -82,7 +85,11 @@ public MultiValuesSourceFieldConfig(StreamInput in) throws IOException {
         this.fieldName = in.readString();
         this.missing = in.readGenericValue();
         this.script = in.readOptionalWriteable(Script::new);
-        this.timeZone = in.readOptionalTimeZone();
+        if (in.getVersion().before(Version.V_7_0_0)) {
+            this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone());
+        } else {
+            this.timeZone = in.readOptionalZoneId();
+        }
     }

     public Object getMissing() {
@@ -93,7 +100,7 @@ public Script getScript() {
         return script;
     }

-    public DateTimeZone getTimeZone() {
+    public ZoneId getTimeZone() {
         return timeZone;
     }
@@ -106,7 +113,11 @@ public void writeTo(StreamOutput out) throws IOException {
         out.writeString(fieldName);
         out.writeGenericValue(missing);
         out.writeOptionalWriteable(script);
-        out.writeOptionalTimeZone(timeZone);
+        if (out.getVersion().before(Version.V_7_0_0)) {
+            out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone));
+        } else {
+            out.writeOptionalZoneId(timeZone);
+        }
     }

     @Override
@@ -122,7 +133,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             builder.field(ParseField.CommonFields.FIELD.getPreferredName(), fieldName);
         }
         if (timeZone != null) {
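// Editor's note: a minimal, stand-alone sketch of the Joda <-> java.time zone
// bridging that the version-gated stream branches above rely on. The real
// helpers are DateUtils.dateTimeZoneToZoneId/zoneIdToDateTimeZone (confirmed
// by this patch); this hypothetical reconstruction ignores their null/UTC
// special cases. Pre-7.0 peers still expect a Joda DateTimeZone on the wire,
// so the writer downgrades the ZoneId to its string ID for old versions.
import java.time.ZoneId;
import org.joda.time.DateTimeZone; // assumes joda-time on the classpath

final class ZoneBridgeSketch {
    private ZoneBridgeSketch() {}

    // Joda zone IDs are tz-database names, which ZoneId.of also accepts.
    static ZoneId toZoneId(DateTimeZone tz) {
        return tz == null ? null : ZoneId.of(tz.getID());
    }

    static DateTimeZone toDateTimeZone(ZoneId zoneId) {
        // Offset-only IDs such as "Z" need special handling in the real helper.
        return zoneId == null ? null : DateTimeZone.forID(zoneId.getId());
    }

    public static void main(String[] args) {
        System.out.println(toZoneId(DateTimeZone.forID("Europe/Berlin")));  // Europe/Berlin
        System.out.println(toDateTimeZone(ZoneId.of("America/New_York"))); // America/New_York
    }
}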
-            builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone.getID());
+            builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone.getId());
         }
         builder.endObject();
         return builder;
@@ -153,7 +164,7 @@ public static class Builder {
         private String fieldName;
         private Object missing = null;
         private Script script = null;
-        private DateTimeZone timeZone = null;
+        private ZoneId timeZone = null;

         public String getFieldName() {
             return fieldName;
@@ -182,11 +193,11 @@ public Builder setScript(Script script) {
             return this;
         }

-        public DateTimeZone getTimeZone() {
+        public ZoneId getTimeZone() {
             return timeZone;
         }

-        public Builder setTimeZone(DateTimeZone timeZone) {
+        public Builder setTimeZone(ZoneId timeZone) {
             this.timeZone = timeZone;
             return this;
         }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
index 25a90e581f00c..3cbd11288bffc 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
@@ -28,9 +28,9 @@
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.search.DocValueFormat;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.ZoneOffset;

 public enum ValueType implements Writeable {
@@ -42,7 +42,7 @@
     DOUBLE((byte) 3, "float|double", "double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
     NUMBER((byte) 4, "number", "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
     DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class,
-        new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateTimeZone.UTC)),
+        new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ZoneOffset.UTC)),
     IP((byte) 6, "ip", "ip", ValuesSourceType.BYTES, IndexFieldData.class, DocValueFormat.IP),
     // TODO: what is the difference between "number" and "numeric"?
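// Editor's note: a compact JDK-only sketch of why ZoneOffset.UTC is a drop-in
// replacement for Joda's DateTimeZone.UTC in the DATE doc-value format above.
// DateTimeFormatter.ISO_DATE_TIME is a hypothetical stand-in for the mapper's
// DEFAULT_DATE_TIME_FORMATTER, so the snippet needs nothing from this codebase.
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

class UtcDocValueFormatSketch {
    public static void main(String[] args) {
        DateTimeFormatter utc = DateTimeFormatter.ISO_DATE_TIME.withZone(ZoneOffset.UTC);
        // Epoch millis render in UTC, just as they did with the Joda zone.
        System.out.println(utc.format(Instant.ofEpochMilli(0L))); // 1970-01-01T00:00:00Z
    }
}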
     NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
index 040cc1b542f07..d3abe6f3169ee 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregationBuilder.java
@@ -18,8 +18,10 @@
  */
 package org.elasticsearch.search.aggregations.support;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.time.DateUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
@@ -28,9 +30,9 @@
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.internal.SearchContext;
-import org.joda.time.DateTimeZone;
 import java.io.IOException;
+import java.time.ZoneId;
 import java.util.Map;
 import java.util.Objects;
@@ -81,7 +83,7 @@ public final AB subAggregations(Builder subFactories) {
     private ValueType valueType = null;
     private String format = null;
     private Object missing = null;
-    private DateTimeZone timeZone = null;
+    private ZoneId timeZone = null;
     protected ValuesSourceConfig config;

     protected ValuesSourceAggregationBuilder(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) {
@@ -144,8 +146,10 @@ private void read(StreamInput in) throws IOException {
         }
         format = in.readOptionalString();
         missing = in.readGenericValue();
-        if (in.readBoolean()) {
-            timeZone = DateTimeZone.forID(in.readString());
+        if (in.getVersion().before(Version.V_7_0_0)) {
+            timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone());
+        } else {
+            timeZone = in.readOptionalZoneId();
         }
     }
@@ -167,10 +171,10 @@ protected final void doWriteTo(StreamOutput out) throws IOException {
         }
         out.writeOptionalString(format);
         out.writeGenericValue(missing);
-        boolean hasTimeZone = timeZone != null;
-        out.writeBoolean(hasTimeZone);
-        if (hasTimeZone) {
-            out.writeString(timeZone.getID());
+        if (out.getVersion().before(Version.V_7_0_0)) {
+            out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone));
+        } else {
+            out.writeOptionalZoneId(timeZone);
         }
         innerWriteTo(out);
     }
@@ -289,7 +293,7 @@ public Object missing() {
      * Sets the time zone to use for this aggregation
      */
     @SuppressWarnings("unchecked")
-    public AB timeZone(DateTimeZone timeZone) {
+    public AB timeZone(ZoneId timeZone) {
         if (timeZone == null) {
             throw new IllegalArgumentException("[timeZone] must not be null: [" + name + "]");
         }
@@ -300,7 +304,7 @@ public AB timeZone(DateTimeZone timeZone) {
     /**
      * Gets the time zone to use for this aggregation
      */
-    public DateTimeZone timeZone() {
+    public ZoneId timeZone() {
         return timeZone;
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
index 39c53f39c7dac..82baa04fe8f1a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java
@@ -32,7 +32,9 @@
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
-import org.joda.time.DateTimeZone;
+
+import java.time.ZoneId;
+import java.time.ZoneOffset;

 /**
  * A configuration that tells aggregations how to retrieve data from the index
@@ -48,7 +50,7 @@ public static ValuesSourceConfig resolve(
         ValueType valueType, String field, Script script,
         Object missing,
-        DateTimeZone timeZone,
+        ZoneId timeZone,
         String format) {

         if (field == null) {
@@ -121,7 +123,7 @@ private static AggregationScript.LeafFactory createScript(Script script, QuerySh
         }
     }

-    private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, @Nullable DateTimeZone tz) {
+    private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, @Nullable ZoneId tz) {
         if (valueType == null) {
             return DocValueFormat.RAW; // we can't figure it out
         }
@@ -130,7 +132,7 @@ private static DocValueFormat resolveFormat(@Nullable String format, @Nullable V
             valueFormat = new DocValueFormat.Decimal(format);
         }
         if (valueFormat instanceof DocValueFormat.DateTime && format != null) {
-            valueFormat = new DocValueFormat.DateTime(DateFormatter.forPattern(format), tz != null ? tz : DateTimeZone.UTC);
+            valueFormat = new DocValueFormat.DateTime(DateFormatter.forPattern(format), tz != null ? tz : ZoneOffset.UTC);
         }
         return valueFormat;
     }
@@ -142,7 +144,7 @@ private static DocValueFormat resolveFormat(@Nullable String format, @Nullable V
     private boolean unmapped = false;
     private DocValueFormat format = DocValueFormat.RAW;
     private Object missing;
-    private DateTimeZone timeZone;
+    private ZoneId timeZone;

     public ValuesSourceConfig(ValuesSourceType valueSourceType) {
         this.valueSourceType = valueSourceType;
@@ -206,12 +208,12 @@ public Object missing() {
         return this.missing;
     }

-    public ValuesSourceConfig timezone(final DateTimeZone timeZone) {
-        this.timeZone= timeZone;
+    public ValuesSourceConfig timezone(final ZoneId timeZone) {
+        this.timeZone = timeZone;
         return this;
     }

-    public DateTimeZone timezone() {
+    public ZoneId timezone() {
         return this.timeZone;
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
index fc0a2f3a9fefe..24bdffaa3fa89 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParserHelper.java
@@ -25,7 +25,9 @@
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.script.Script;
-import org.joda.time.DateTimeZone;
+
+import java.time.ZoneId;
+import java.time.ZoneOffset;

 public final class ValuesSourceParserHelper {
@@ -91,9 +93,9 @@ private static void declareFields(
         if (timezoneAware) {
             objectParser.declareField(ValuesSourceAggregationBuilder::timeZone, p -> {
                 if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
-                    return DateTimeZone.forID(p.text());
+                    return ZoneId.of(p.text());
                 } else {
-                    return DateTimeZone.forOffsetHours(p.intValue());
+                    return ZoneOffset.ofHours(p.intValue());
                 }
             }, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
         }
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
index c1692f606178e..cbd4ff659e599 100644
--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
+++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
@@ -28,7 +28,6 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -52,7 +51,7 @@ public final class SnapshotInfo implements Comparable, ToXContent,
     public static final String CONTEXT_MODE_PARAM = "context_mode";
     public static final String CONTEXT_MODE_SNAPSHOT = "SNAPSHOT";
-    private static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("strictDateOptionalTime");
+    private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("strictDateOptionalTime");
     private static final String SNAPSHOT = "snapshot";
     private static final String UUID = "uuid";
     private static final String INDICES = "indices";
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
index 6b8d1ab4fafb7..9f6f19596d080 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
@@ -25,7 +25,7 @@
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.time.DateFormatters;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
@@ -276,7 +276,7 @@ public void testRolloverOnExistingIndex() throws Exception {
     public void testRolloverWithDateMath() {
         ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
         assumeTrue("only works on the same day", now.plusMinutes(5).getDayOfYear() == now.getDayOfYear());
-        String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1";
+        String index = "test-" + DateFormatter.forPattern("YYYY.MM.dd").format(now) + "-1";
         String dateMathExp = "";
         assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get());
         ensureGreen(index);
@@ -290,14 +290,14 @@ public void testRolloverWithDateMath() {
         ensureGreen(index);
         RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get();
         assertThat(response.getOldIndex(), equalTo(index));
-        assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
+        assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000002"));
         assertThat(response.isDryRun(), equalTo(false));
         assertThat(response.isRolledOver(), equalTo(true));
         assertThat(response.getConditionStatus().size(), equalTo(0));

         response = client().admin().indices().prepareRolloverIndex("test_alias").get();
-        assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
-        assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
+        assertThat(response.getOldIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000002"));
+        assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000003"));
         assertThat(response.isDryRun(), equalTo(false));
         assertThat(response.isRolledOver(), equalTo(true));
         assertThat(response.getConditionStatus().size(), equalTo(0));
@@ -310,8 +310,8 @@ public void testRolloverWithDateMath() {
             IndexMetaData.SETTING_INDEX_PROVIDED_NAME));

         response = client().admin().indices().prepareRolloverIndex("test_alias").setNewIndexName("").get();
-        assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
-        assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-000004"));
+        assertThat(response.getOldIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000003"));
+        assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM.dd").format(now) + "-000004"));
         assertThat(response.isDryRun(), equalTo(false));
         assertThat(response.isRolledOver(), equalTo(true));
         assertThat(response.getConditionStatus().size(), equalTo(0));
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
index 5c67e1bbe566c..2f52bd0d40aae 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
@@ -93,25 +93,25 @@ public void testExpression_MultiParts() throws Exception {

     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
     public void testExpression_CustomFormat() throws Exception {
-        List results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd}}>"));
+        List results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>"));
         assertThat(results.size(), equalTo(1));
         assertThat(results.get(0),
-            equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
+            equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
     }

     public void testExpression_EscapeStatic() throws Exception {
         List result = expressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>"));
         assertThat(result.size(), equalTo(1));
         assertThat(result.get(0),
-            equalTo(".mar{v}el-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
+            equalTo(".mar{v}el-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
     }

     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
     public void testExpression_EscapeDateFormat() throws Exception {
-        List result = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'YYYY}}>"));
+        List result = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>"));
         assertThat(result.size(), equalTo(1));
         assertThat(result.get(0),
-            equalTo(".marvel-" + DateTimeFormat.forPattern("'{year}'YYYY").print(new DateTime(context.getStartTime(), UTC))));
+            equalTo(".marvel-" + DateTimeFormat.forPattern("'{year}'yyyy").print(new DateTime(context.getStartTime(), UTC))));
     }

     public void testExpression_MixedArray() throws Exception {
@@ -150,10 +150,10 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception {
             now = DateTime.now(UTC).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0);
         }
         Context context = new Context(this.context.getState(), this.context.getOptions(), now.getMillis());
-        List results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd|" + timeZone.getID() + "}}>"));
+        List results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getID() + "}}>"));
         assertThat(results.size(), equalTo(1));
         logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0));
-        assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.withZone(timeZone))));
+        assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(now.withZone(timeZone))));
     }

     public void testExpressionInvalidUnescaped() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/common/RoundingTests.java b/server/src/test/java/org/elasticsearch/common/RoundingTests.java
index 1664f67a44df9..9bc7c10abd8c8 100644
--- a/server/src/test/java/org/elasticsearch/common/RoundingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/RoundingTests.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.rounding.DateTimeUnit;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.test.ESTestCase;
@@ -317,7 +318,7 @@ public void testIntervalRounding_HalfDay_DST() {
     }

     /**
-     * randomized test on {@link org.elasticsearch.common.rounding.Rounding.TimeIntervalRounding} with random interval and time zone offsets
+     * randomized test on {@link org.elasticsearch.common.Rounding.TimeIntervalRounding} with random interval and time zone offsets
     */
     public void testIntervalRoundingRandom() {
         for (int i = 0; i < 1000; i++) {
@@ -728,7 +729,7 @@ private static long time(String time) {
     }

     private static long time(String time, ZoneId zone) {
-        TemporalAccessor accessor = DateFormatters.forPattern("date_optional_time").withZone(zone).parse(time);
+        TemporalAccessor accessor = DateFormatter.forPattern("date_optional_time").withZone(zone).parse(time);
         return DateFormatters.toZonedDateTime(accessor).toInstant().toEpochMilli();
     }
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
index b2370dadb604c..c7abea63be081 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
@@ -29,13 +29,11 @@
 import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
 import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeParseException;
 import java.time.temporal.TemporalAccessor;
 import java.util.Locale;

 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.startsWith;

 public class JavaJodaTimeDuellingTests extends ESTestCase {
@@ -64,11 +62,22 @@ public void testTimeZoneFormatting() {
         formatter3.parse("20181126T121212.123-0830");
     }

-    public void testCustomTimeFormats() {
-        assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
-        assertSameDate("12/06", "dd/MM");
-        assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
-    }
+    // this test requires tests to run with -Djava.locale.providers=COMPAT in order to work
+//    public void testCustomTimeFormats() {
+//        assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
+//        assertSameDate("12/06", "dd/MM");
+//        assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
+//
+//        // also ensure that locale based dates are the same
+//        assertSameDate("Di., 05 Dez. 2000 02:55:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+//        assertSameDate("Mi., 06 Dez. 2000 02:55:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+//        assertSameDate("Do., 07 Dez. 2000 00:00:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+//        assertSameDate("Fr., 08 Dez. 2000 00:00:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
+//
+//        DateTime dateTimeNow = DateTime.now(DateTimeZone.UTC);
+//        ZonedDateTime javaTimeNow = Instant.ofEpochMilli(dateTimeNow.getMillis()).atZone(ZoneOffset.UTC);
+//        assertSamePrinterOutput("E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"), javaTimeNow, dateTimeNow);
+//    }

     public void testDuellingFormatsValidParsing() {
         assertSameDate("1522332219", "epoch_second");
@@ -133,10 +142,6 @@ public void testDuellingFormatsValidParsing() {
         assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_millis");
         assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_fraction");
-        assertSameDate("10000", "date_optional_time");
-        assertSameDate("10000T", "date_optional_time");
-        assertSameDate("2018", "date_optional_time");
-        assertSameDate("2018T", "date_optional_time");
         assertSameDate("2018-05", "date_optional_time");
         assertSameDate("2018-05-30", "date_optional_time");
         assertSameDate("2018-05-30T20", "date_optional_time");
@@ -278,7 +283,7 @@ public void testDuellingFormatsValidParsing() {
         // joda comes up with a different exception message here, so we have to adapt
         assertJodaParseException("2012-W1-8", "week_date",
             "Cannot parse \"2012-W1-8\": Value 8 for dayOfWeek must be in the range [1,7]");
-        assertJavaTimeParseException("2012-W1-8", "week_date", "Text '2012-W1-8' could not be parsed");
+        assertJavaTimeParseException("2012-W1-8", "week_date");

         assertSameDate("2012-W48-6T10:15:30.123Z", "week_date_time");
         assertSameDate("2012-W48-6T10:15:30.123+0100", "week_date_time");
@@ -358,6 +363,7 @@ public void testDuelingStrictParsing() {
         assertParseException("2018-12-1", "strict_date_optional_time");
         assertParseException("2018-1-31", "strict_date_optional_time");
         assertParseException("10000-01-31", "strict_date_optional_time");
+        assertSameDate("2010-01-05T02:00", "strict_date_optional_time");
         assertSameDate("2018-12-31T10:15:30", "strict_date_optional_time");
         assertSameDate("2018-12-31T10:15:30Z", "strict_date_optional_time");
         assertSameDate("2018-12-31T10:15:30+0100", "strict_date_optional_time");
@@ -365,6 +371,7 @@ public void testDuelingStrictParsing() {
         assertParseException("2018-12-31T10:15:3", "strict_date_optional_time");
         assertParseException("2018-12-31T10:5:30", "strict_date_optional_time");
         assertParseException("2018-12-31T9:15:30", "strict_date_optional_time");
+        assertSameDate("2015-01-04T00:00Z", "strict_date_optional_time");
         assertSameDate("2018-12-31T10:15:30.123Z", "strict_date_time");
         assertSameDate("2018-12-31T10:15:30.123+0100", "strict_date_time");
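// Editor's note: a small JDK-only illustration (not the Elasticsearch
// DateFormatter under test) of the two offset spellings the surrounding
// "strict_date_time" assertions feed in: java.time's built-in
// ISO_OFFSET_DATE_TIME handles "+01:00", while the colon-less "+0100"
// needs a pattern with "XX". Both denote the same instant.
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;

class StrictDateTimeSketch {
    public static void main(String[] args) {
        OffsetDateTime withColon = OffsetDateTime.parse("2018-12-31T10:15:30.123+01:00",
            DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        OffsetDateTime withoutColon = OffsetDateTime.parse("2018-12-31T10:15:30.123+0100",
            DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXX"));
        // Same point in time regardless of how the offset is spelled.
        System.out.println(withColon.toInstant().equals(withoutColon.toInstant())); // true
    }
}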
assertSameDate("2018-12-31T10:15:30.123+01:00", "strict_date_time"); @@ -456,7 +463,7 @@ public void testDuelingStrictParsing() { // joda comes up with a different exception message here, so we have to adapt assertJodaParseException("2012-W01-8", "strict_week_date", "Cannot parse \"2012-W01-8\": Value 8 for dayOfWeek must be in the range [1,7]"); - assertJavaTimeParseException("2012-W01-8", "strict_week_date", "Text '2012-W01-8' could not be parsed"); + assertJavaTimeParseException("2012-W01-8", "strict_week_date"); assertSameDate("2012-W48-6T10:15:30.123Z", "strict_week_date_time"); assertSameDate("2012-W48-6T10:15:30.123+0100", "strict_week_date_time"); @@ -585,19 +592,55 @@ public void testSamePrinterOutput() { assertSamePrinterOutput("strictYear", javaDate, jodaDate); assertSamePrinterOutput("strictYearMonth", javaDate, jodaDate); assertSamePrinterOutput("strictYearMonthDay", javaDate, jodaDate); + assertSamePrinterOutput("strict_date_optional_time", javaDate, jodaDate); + assertSamePrinterOutput("epoch_millis", javaDate, jodaDate); + } + + public void testSamePrinterOutputWithTimeZone() { + String format = "strict_date_optional_time"; + String dateInput = "2017-02-01T08:02:00.000-01:00"; + DateFormatter javaFormatter = DateFormatter.forPattern(format); + TemporalAccessor javaDate = javaFormatter.parse(dateInput); + + DateFormatter jodaFormatter = Joda.forPattern(format); + DateTime dateTime = jodaFormatter.parseJoda(dateInput); + + String javaDateString = javaFormatter.withZone(ZoneOffset.ofHours(-1)).format(javaDate); + String jodaDateString = jodaFormatter.withZone(ZoneOffset.ofHours(-1)).formatJoda(dateTime); + String message = String.format(Locale.ROOT, "expected string representation to be equal for format [%s]: joda [%s], java [%s]", + format, jodaDateString, javaDateString); + assertThat(message, javaDateString, is(jodaDateString)); + } + + public void testDateFormatterWithLocale() { + Locale locale = randomLocale(random()); + String pattern = randomBoolean() ? 
"strict_date_optional_time||date_time" : "date_time||strict_date_optional_time"; + DateFormatter formatter = DateFormatter.forPattern(pattern).withLocale(locale); + assertThat(formatter.pattern(), is(pattern)); + assertThat(formatter.locale(), is(locale)); } public void testSeveralTimeFormats() { - DateFormatter jodaFormatter = DateFormatter.forPattern("year_month_day||ordinal_date"); - DateFormatter javaFormatter = DateFormatter.forPattern("8year_month_day||ordinal_date"); - assertSameDate("2018-12-12", "year_month_day||ordinal_date", jodaFormatter, javaFormatter); - assertSameDate("2018-128", "year_month_day||ordinal_date", jodaFormatter, javaFormatter); + { + String format = "year_month_day||ordinal_date"; + DateFormatter jodaFormatter = Joda.forPattern(format); + DateFormatter javaFormatter = DateFormatter.forPattern(format); + assertSameDate("2018-12-12", format, jodaFormatter, javaFormatter); + assertSameDate("2018-128", format, jodaFormatter, javaFormatter); + } + { + String format = "strictDateOptionalTime||dd-MM-yyyy"; + DateFormatter jodaFormatter = Joda.forPattern(format); + DateFormatter javaFormatter = DateFormatter.forPattern(format); + assertSameDate("31-01-2014", format, jodaFormatter, javaFormatter); + } } private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) { assertThat(jodaDate.getMillis(), is(javaDate.toInstant().toEpochMilli())); - String javaTimeOut = DateFormatters.forPattern(format).format(javaDate); - String jodaTimeOut = DateFormatter.forPattern(format).formatJoda(jodaDate); + String javaTimeOut = DateFormatter.forPattern(format).format(javaDate); + String jodaTimeOut = Joda.forPattern(format).formatJoda(jodaDate); + if (JavaVersion.current().getVersion().get(0) == 8 && javaTimeOut.endsWith(".0") && (format.equals("epoch_second") || format.equals("epoch_millis"))) { // java 8 has a bug in DateTimeFormatter usage when printing dates that rely on isSupportedBy for fields, which is @@ -611,7 +654,7 @@ private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, Date private void assertSameDate(String input, String format) { DateFormatter jodaFormatter = Joda.forPattern(format); - DateFormatter javaFormatter = DateFormatters.forPattern(format); + DateFormatter javaFormatter = DateFormatter.forPattern(format); assertSameDate(input, format, jodaFormatter, javaFormatter); } @@ -629,7 +672,7 @@ private void assertSameDate(String input, String format, DateFormatter jodaForma private void assertParseException(String input, String format) { assertJodaParseException(input, format, "Invalid format: \"" + input); - assertJavaTimeParseException(input, format, "Text '" + input + "' could not be parsed"); + assertJavaTimeParseException(input, format); } private void assertJodaParseException(String input, String format, String expectedMessage) { @@ -638,9 +681,10 @@ private void assertJodaParseException(String input, String format, String expect assertThat(e.getMessage(), containsString(expectedMessage)); } - private void assertJavaTimeParseException(String input, String format, String expectedMessage) { - DateFormatter javaTimeFormatter = DateFormatters.forPattern(format); - DateTimeParseException dateTimeParseException = expectThrows(DateTimeParseException.class, () -> javaTimeFormatter.parse(input)); - assertThat(dateTimeParseException.getMessage(), startsWith(expectedMessage)); + private void assertJavaTimeParseException(String input, String format) { + DateFormatter javaTimeFormatter = DateFormatter.forPattern(format); + 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> javaTimeFormatter.parse(input)); + assertThat(e.getMessage(), containsString(input)); + assertThat(e.getMessage(), containsString(format)); } } diff --git a/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java index e502dfc6f963f..19aea3f19ba3b 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; +import java.time.Instant; import java.time.ZoneId; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; @@ -35,7 +36,7 @@ public class JodaDateMathParserTests extends ESTestCase { - DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime||epoch_millis"); + DateFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis"); DateMathParser parser = formatter.toDateMathParser(); void assertDateMathEquals(String toTest, String expected) { @@ -43,12 +44,12 @@ void assertDateMathEquals(String toTest, String expected) { } void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, DateTimeZone timeZone) { - long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone); + long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone).toEpochMilli(); assertDateEquals(gotMillis, toTest, expected); } void assertDateEquals(long gotMillis, String original, String expected) { - long expectedMillis = parser.parse(expected, () -> 0); + long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli(); if (gotMillis != expectedMillis) { fail("Date math not equal\n" + "Original : " + original + "\n" + @@ -147,7 +148,7 @@ public void testMultipleAdjustments() { public void testNow() { - final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null); + final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null).toEpochMilli(); assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null); assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null); @@ -164,10 +165,10 @@ public void testRoundingPreservesEpochAsBaseDate() { DateMathParser parser = formatter.toDateMathParser(); assertEquals( this.formatter.parseMillis("1970-01-01T04:52:20.000Z"), - parser.parse("04:52:20", () -> 0, false, (ZoneId) null)); + parser.parse("04:52:20", () -> 0, false, (ZoneId) null).toEpochMilli()); assertEquals( this.formatter.parseMillis("1970-01-01T04:52:20.999Z"), - parser.parse("04:52:20", () -> 0, true, (ZoneId) null)); + parser.parse("04:52:20", () -> 0, true, (ZoneId) null).toEpochMilli()); } // Implicit rounding happening when parts of the date are not specified @@ -185,9 +186,9 @@ public void testImplicitRounding() { assertDateMathEquals("2014-11-18T09:20", "2014-11-18T08:20:59.999Z", 0, true, DateTimeZone.forID("CET")); // implicit rounding with explicit timezone in the date format - DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-ddZ"); + DateFormatter formatter = Joda.forPattern("yyyy-MM-ddZ"); DateMathParser parser = formatter.toDateMathParser(); - long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); + Instant time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); 
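// Editor's note: a plain-JDK sketch of the implicit-rounding semantics this
// test exercises (my own minimal reconstruction, not the DateMathParser
// implementation): a date with no time component resolves to the start of
// that day, or, when rounding up, to the last millisecond of that day.
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;

class ImplicitRoundingSketch {
    public static void main(String[] args) {
        LocalDate date = LocalDate.parse("2011-10-09");
        ZoneOffset zone = ZoneOffset.ofHours(1); // the "+01:00" in "2011-10-09+01:00"
        Instant floor = date.atStartOfDay(zone).toInstant();
        Instant ceil = date.plusDays(1).atStartOfDay(zone).toInstant().minusMillis(1);
        System.out.println(floor); // 2011-10-08T23:00:00Z, i.e. 2011-10-09T00:00:00.000+01:00
        System.out.println(ceil);  // 2011-10-09T22:59:59.999Z, i.e. 2011-10-09T23:59:59.999+01:00
    }
}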
         assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time);
         time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null);
         assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time);
@@ -261,7 +262,7 @@ public void testTimestamps() {
         // also check other time units
         JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_second"));
-        long datetime = parser.parse("1418248078", () -> 0);
+        long datetime = parser.parse("1418248078", () -> 0).toEpochMilli();
         assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");

         // a timestamp before 10000 is a year
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java b/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java
index fde9d73fae892..003785b3c87b3 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JodaTests.java
@@ -26,15 +26,18 @@

 import java.time.ZoneOffset;

+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+

 public class JodaTests extends ESTestCase {

     public void testBasicTTimePattern() {
-        DateFormatter formatter1 = DateFormatter.forPattern("basic_t_time");
+        DateFormatter formatter1 = Joda.forPattern("basic_t_time");
         assertEquals(formatter1.pattern(), "basic_t_time");
         assertEquals(formatter1.zone(), ZoneOffset.UTC);

-        DateFormatter formatter2 = DateFormatter.forPattern("basicTTime");
+        DateFormatter formatter2 = Joda.forPattern("basicTTime");
         assertEquals(formatter2.pattern(), "basicTTime");
         assertEquals(formatter2.zone(), ZoneOffset.UTC);

@@ -42,9 +45,25 @@ public void testBasicTTimePattern() {
         assertEquals("T102030.040Z", formatter1.formatJoda(dt));
         assertEquals("T102030.040Z", formatter1.formatJoda(dt));

-        expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_t_Time"));
-        expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_T_Time"));
-        expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_T_time"));
+        expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_t_Time"));
+        expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_T_Time"));
+        expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_T_time"));
     }

+    public void testEqualsAndHashcode() {
+        String format = randomFrom("yyyy/MM/dd HH:mm:ss", "basic_t_time");
+        JodaDateFormatter first = Joda.forPattern(format);
+        JodaDateFormatter second = Joda.forPattern(format);
+        JodaDateFormatter third = Joda.forPattern(" HH:mm:ss, yyyy/MM/dd");
+
+        assertThat(first, is(second));
+        assertThat(second, is(first));
+        assertThat(first, is(not(third)));
+        assertThat(second, is(not(third)));
+
+        assertThat(first.hashCode(), is(second.hashCode()));
+        assertThat(second.hashCode(), is(first.hashCode()));
+        assertThat(first.hashCode(), is(not(third.hashCode())));
+        assertThat(second.hashCode(), is(not(third.hashCode())));
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java b/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java
deleted file mode 100644
index b6f1b1b650a6f..0000000000000
--- a/server/src/test/java/org/elasticsearch/common/joda/SimpleJodaTests.java
+++ /dev/null
@@ -1,800 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.joda;
-
-import org.elasticsearch.common.time.DateFormatter;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.mapper.RootObjectMapper;
-import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.LocalDateTime;
-import org.joda.time.MutableDateTime;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.format.DateTimeFormatterBuilder;
-import org.joda.time.format.DateTimeParser;
-import org.joda.time.format.ISODateTimeFormat;
-
-import java.util.Date;
-import java.util.Locale;
-
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.endsWith;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-
-public class SimpleJodaTests extends ESTestCase {
-    public void testMultiParsers() {
-        DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
-        DateTimeParser[] parsers = new DateTimeParser[3];
-        parsers[0] = DateTimeFormat.forPattern("MM/dd/yyyy").withZone(DateTimeZone.UTC).getParser();
-        parsers[1] = DateTimeFormat.forPattern("MM-dd-yyyy").withZone(DateTimeZone.UTC).getParser();
-        parsers[2] = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZone(DateTimeZone.UTC).getParser();
-        builder.append(DateTimeFormat.forPattern("MM/dd/yyyy").withZone(DateTimeZone.UTC).getPrinter(), parsers);
-
-        DateTimeFormatter formatter = builder.toFormatter();
-
-        formatter.parseMillis("2009-11-15 14:12:12");
-    }
-
-    public void testIsoDateFormatDateTimeNoMillisUTC() {
-        DateTimeFormatter formatter = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC);
-        long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
-
-        assertThat(millis, equalTo(0L));
-    }
-
-    public void testUpperBound() {
-        MutableDateTime dateTime = new MutableDateTime(3000, 12, 31, 23, 59, 59, 999, DateTimeZone.UTC);
-        DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
-
-        String value = "2000-01-01";
-        int i = formatter.parseInto(dateTime, value, 0);
-        assertThat(i, equalTo(value.length()));
-        assertThat(dateTime.toString(), equalTo("2000-01-01T23:59:59.999Z"));
-    }
-
-    public void testIsoDateFormatDateOptionalTimeUTC() {
-        DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
-        long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
-        assertThat(millis, equalTo(0L));
-        millis = formatter.parseMillis("1970-01-01T00:00:00.001Z");
-        assertThat(millis, equalTo(1L));
-        millis = formatter.parseMillis("1970-01-01T00:00:00.1Z");
-        assertThat(millis, equalTo(100L));
-        millis = formatter.parseMillis("1970-01-01T00:00:00.1");
-        assertThat(millis, equalTo(100L));
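// Editor's note: the deleted Joda fractional-second assertions around here
// have direct java.time equivalents; a minimal sketch (plain JDK, nothing
// from this codebase) showing the same behaviour:
import java.time.Instant;

class FractionalSecondsSketch {
    public static void main(String[] args) {
        // ".1" is a tenth of a second -> 100 epoch millis, matching the
        // parseMillis("1970-01-01T00:00:00.1Z") == 100L assertion above.
        System.out.println(Instant.parse("1970-01-01T00:00:00.1Z").toEpochMilli());   // 100
        System.out.println(Instant.parse("1970-01-01T00:00:00.001Z").toEpochMilli()); // 1
    }
}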
- millis = formatter.parseMillis("1970-01-01T00:00:00"); - assertThat(millis, equalTo(0L)); - millis = formatter.parseMillis("1970-01-01"); - assertThat(millis, equalTo(0L)); - - millis = formatter.parseMillis("1970"); - assertThat(millis, equalTo(0L)); - - try { - formatter.parseMillis("1970 kuku"); - fail("formatting should fail"); - } catch (IllegalArgumentException e) { - // all is well - } - - // test offset in format - millis = formatter.parseMillis("1970-01-01T00:00:00-02:00"); - assertThat(millis, equalTo(TimeValue.timeValueHours(2).millis())); - } - - public void testIsoVsCustom() { - DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC); - long millis = formatter.parseMillis("1970-01-01T00:00:00"); - assertThat(millis, equalTo(0L)); - - formatter = DateTimeFormat.forPattern("yyyy/MM/dd HH:mm:ss").withZone(DateTimeZone.UTC); - millis = formatter.parseMillis("1970/01/01 00:00:00"); - assertThat(millis, equalTo(0L)); - - DateFormatter formatter2 = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss"); - millis = formatter2.parseMillis("1970/01/01 00:00:00"); - assertThat(millis, equalTo(0L)); - } - - public void testWriteAndParse() { - DateTimeFormatter dateTimeWriter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC); - DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC); - Date date = new Date(); - assertThat(formatter.parseMillis(dateTimeWriter.print(date.getTime())), equalTo(date.getTime())); - } - - public void testSlashInFormat() { - DateFormatter formatter = DateFormatter.forPattern("MM/yyyy"); - formatter.parseMillis("01/2001"); - - DateFormatter formatter2 = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss"); - long millis = formatter2.parseMillis("1970/01/01 00:00:00"); - formatter2.formatMillis(millis); - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - formatter2.parseMillis("1970/01/01")); - } - - public void testMultipleFormats() { - DateFormatter formatter = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd"); - long millis = formatter.parseMillis("1970/01/01 00:00:00"); - assertThat("1970/01/01 00:00:00", is(formatter.formatMillis(millis))); - } - - public void testMultipleDifferentFormats() { - DateFormatter formatter = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd"); - String input = "1970/01/01 00:00:00"; - long millis = formatter.parseMillis(input); - assertThat(input, is(formatter.formatMillis(millis))); - - DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||dateOptionalTime"); - DateFormatter.forPattern("dateOptionalTime||yyyy/MM/dd HH:mm:ss||yyyy/MM/dd"); - DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||dateOptionalTime||yyyy/MM/dd"); - DateFormatter.forPattern("date_time||date_time_no_millis"); - DateFormatter.forPattern(" date_time || date_time_no_millis"); - } - - public void testInvalidPatterns() { - expectInvalidPattern("does_not_exist_pattern", "Invalid format: [does_not_exist_pattern]: Illegal pattern component: o"); - expectInvalidPattern("OOOOO", "Invalid format: [OOOOO]: Illegal pattern component: OOOOO"); - expectInvalidPattern(null, "No date pattern provided"); - expectInvalidPattern("", "No date pattern provided"); - expectInvalidPattern(" ", "No date pattern provided"); - expectInvalidPattern("||date_time_no_millis", "No date pattern provided"); - expectInvalidPattern("date_time_no_millis||", "No date pattern provided"); - } - - private void expectInvalidPattern(String pattern, String 
errorMessage) { - try { - DateFormatter.forPattern(pattern); - fail("Pattern " + pattern + " should have thrown an exception but did not"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString(errorMessage)); - } - } - - public void testRounding() { - long TIME = utcTimeInMillis("2009-02-03T01:01:01"); - MutableDateTime time = new MutableDateTime(DateTimeZone.UTC); - time.setMillis(TIME); - assertThat(time.monthOfYear().roundFloor().toString(), equalTo("2009-02-01T00:00:00.000Z")); - time.setMillis(TIME); - assertThat(time.hourOfDay().roundFloor().toString(), equalTo("2009-02-03T01:00:00.000Z")); - time.setMillis(TIME); - assertThat(time.dayOfMonth().roundFloor().toString(), equalTo("2009-02-03T00:00:00.000Z")); - } - - public void testRoundingSetOnTime() { - MutableDateTime time = new MutableDateTime(DateTimeZone.UTC); - time.setRounding(time.getChronology().monthOfYear(), MutableDateTime.ROUND_FLOOR); - time.setMillis(utcTimeInMillis("2009-02-03T01:01:01")); - assertThat(time.toString(), equalTo("2009-02-01T00:00:00.000Z")); - assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-01T00:00:00.000Z"))); - - time.setMillis(utcTimeInMillis("2009-05-03T01:01:01")); - assertThat(time.toString(), equalTo("2009-05-01T00:00:00.000Z")); - assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-05-01T00:00:00.000Z"))); - - time = new MutableDateTime(DateTimeZone.UTC); - time.setRounding(time.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR); - time.setMillis(utcTimeInMillis("2009-02-03T01:01:01")); - assertThat(time.toString(), equalTo("2009-02-03T00:00:00.000Z")); - assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-03T00:00:00.000Z"))); - - time.setMillis(utcTimeInMillis("2009-02-02T23:01:01")); - assertThat(time.toString(), equalTo("2009-02-02T00:00:00.000Z")); - assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-02T00:00:00.000Z"))); - - time = new MutableDateTime(DateTimeZone.UTC); - time.setRounding(time.getChronology().weekOfWeekyear(), MutableDateTime.ROUND_FLOOR); - time.setMillis(utcTimeInMillis("2011-05-05T01:01:01")); - assertThat(time.toString(), equalTo("2011-05-02T00:00:00.000Z")); - assertThat(time.getMillis(), equalTo(utcTimeInMillis("2011-05-02T00:00:00.000Z"))); - } - - public void testRoundingWithTimeZone() { - MutableDateTime time = new MutableDateTime(DateTimeZone.UTC); - time.setZone(DateTimeZone.forOffsetHours(-2)); - time.setRounding(time.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR); - - MutableDateTime utcTime = new MutableDateTime(DateTimeZone.UTC); - utcTime.setRounding(utcTime.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR); - - time.setMillis(utcTimeInMillis("2009-02-03T01:01:01")); - utcTime.setMillis(utcTimeInMillis("2009-02-03T01:01:01")); - - assertThat(time.toString(), equalTo("2009-02-02T00:00:00.000-02:00")); - assertThat(utcTime.toString(), equalTo("2009-02-03T00:00:00.000Z")); - // the time is on the 2nd, and utcTime is on the 3rd, but, because time already encapsulates - // time zone, the millis diff is not 24, but 22 hours - assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis())); - - time.setMillis(utcTimeInMillis("2009-02-04T01:01:01")); - utcTime.setMillis(utcTimeInMillis("2009-02-04T01:01:01")); - assertThat(time.toString(), equalTo("2009-02-03T00:00:00.000-02:00")); - assertThat(utcTime.toString(), equalTo("2009-02-04T00:00:00.000Z")); - assertThat(time.getMillis(), equalTo(utcTime.getMillis() - 
TimeValue.timeValueHours(22).millis())); - } - - public void testThatEpochsCanBeParsed() { - boolean parseMilliSeconds = randomBoolean(); - - // epoch: 1433144433655 => date: Mon Jun 1 09:40:33.655 CEST 2015 - DateFormatter formatter = DateFormatter.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second"); - DateTime dateTime = formatter.parseJoda(parseMilliSeconds ? "1433144433655" : "1433144433"); - - assertThat(dateTime.getYear(), is(2015)); - assertThat(dateTime.getDayOfMonth(), is(1)); - assertThat(dateTime.getMonthOfYear(), is(6)); - assertThat(dateTime.getHourOfDay(), is(7)); // utc timezone, +2 offset due to CEST - assertThat(dateTime.getMinuteOfHour(), is(40)); - assertThat(dateTime.getSecondOfMinute(), is(33)); - - if (parseMilliSeconds) { - assertThat(dateTime.getMillisOfSecond(), is(655)); - } else { - assertThat(dateTime.getMillisOfSecond(), is(0)); - } - - // test floats get truncated - String epochFloatValue = String.format(Locale.US, "%d.%d", dateTime.getMillis() / (parseMilliSeconds ? 1L : 1000L), - randomNonNegativeLong()); - assertThat(formatter.parseJoda(epochFloatValue).getMillis(), is(dateTime.getMillis())); - } - - public void testThatNegativeEpochsCanBeParsed() { - // problem: negative epochs can be arbitrary in size... - boolean parseMilliSeconds = randomBoolean(); - DateFormatter formatter = DateFormatter.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second"); - DateTime dateTime = formatter.parseJoda("-10000"); - - assertThat(dateTime.getYear(), is(1969)); - assertThat(dateTime.getMonthOfYear(), is(12)); - assertThat(dateTime.getDayOfMonth(), is(31)); - if (parseMilliSeconds) { - assertThat(dateTime.getHourOfDay(), is(23)); // utc timezone, +2 offset due to CEST - assertThat(dateTime.getMinuteOfHour(), is(59)); - assertThat(dateTime.getSecondOfMinute(), is(50)); - } else { - assertThat(dateTime.getHourOfDay(), is(21)); // utc timezone, +2 offset due to CEST - assertThat(dateTime.getMinuteOfHour(), is(13)); - assertThat(dateTime.getSecondOfMinute(), is(20)); - } - - // test floats get truncated - String epochFloatValue = String.format(Locale.US, "%d.%d", dateTime.getMillis() / (parseMilliSeconds ? 
1L : 1000L), - randomNonNegativeLong()); - assertThat(formatter.parseJoda(epochFloatValue).getMillis(), is(dateTime.getMillis())); - - // every negative epoch must be parsed, regardless of its length - if (parseMilliSeconds) { - formatter.parseJoda("-100000000"); - formatter.parseJoda("-999999999999"); - formatter.parseJoda("-1234567890123"); - formatter.parseJoda("-1234567890123456789"); - - formatter.parseJoda("-1234567890123.9999"); - formatter.parseJoda("-1234567890123456789.9999"); - } else { - formatter.parseJoda("-100000000"); - formatter.parseJoda("-1234567890"); - formatter.parseJoda("-1234567890123456"); - - formatter.parseJoda("-1234567890.9999"); - formatter.parseJoda("-1234567890123456.9999"); - } - - assertWarnings("Use of negative values" + - " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch."); - } - - public void testForInvalidDatesInEpochSecond() { - DateFormatter formatter = DateFormatter.forPattern("epoch_second"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - formatter.parseJoda(randomFrom("invalid date", "12345678901234567", "12345678901234567890"))); - assertThat(e.getMessage(), containsString("Invalid format")); - } - - public void testForInvalidDatesInEpochMillis() { - DateFormatter formatter = DateFormatter.forPattern("epoch_millis"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - formatter.parseJoda(randomFrom("invalid date", "12345678901234567890"))); - assertThat(e.getMessage(), containsString("Invalid format")); - } - - public void testForInvalidTimeZoneWithEpochSeconds() { - DateTimeFormatter dateTimeFormatter = new DateTimeFormatterBuilder() - .append(new Joda.EpochTimeParser(false)) - .toFormatter() - .withZone(DateTimeZone.forOffsetHours(1)) - .withLocale(Locale.ROOT); - DateFormatter formatter = - new JodaDateFormatter("epoch_seconds", dateTimeFormatter, dateTimeFormatter); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - formatter.parseJoda("1433144433655")); - assertThat(e.getMessage(), containsString("time_zone must be UTC")); - } - - public void testForInvalidTimeZoneWithEpochMillis() { - DateTimeFormatter dateTimeFormatter = new DateTimeFormatterBuilder() - .append(new Joda.EpochTimeParser(true)) - .toFormatter() - .withZone(DateTimeZone.forOffsetHours(1)) - .withLocale(Locale.ROOT); - DateFormatter formatter = - new JodaDateFormatter("epoch_millis", dateTimeFormatter, dateTimeFormatter); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - formatter.parseJoda("1433144433")); - assertThat(e.getMessage(), containsString("time_zone must be UTC")); - } - - public void testThatEpochParserIsPrinter() { - JodaDateFormatter formatter = Joda.forPattern("epoch_millis"); - assertThat(formatter.parser.isPrinter(), is(true)); - assertThat(formatter.printer.isPrinter(), is(true)); - - JodaDateFormatter epochSecondFormatter = Joda.forPattern("epoch_second"); - assertThat(epochSecondFormatter.parser.isPrinter(), is(true)); - assertThat(epochSecondFormatter.printer.isPrinter(), is(true)); - }
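The deleted printer test that follows pins its output to widths 10 and 13. Those widths are plain epoch arithmetic; a JDK-only sketch (java.time.Instant standing in for the LocalDateTime the test uses):

    // Epoch seconds have 10 digits and epoch millis 13 digits for any current date;
    // both widths hold from September 2001 until the values gain a digit in 2286.
    java.time.Instant now = java.time.Instant.now();
    String epochSeconds = Long.toString(now.getEpochSecond()); // length 10
    String epochMillis = Long.toString(now.toEpochMilli());    // length 13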
- - public void testThatEpochTimePrinterWorks() { - StringBuffer buffer = new StringBuffer(); - LocalDateTime now = LocalDateTime.now(); - - Joda.EpochTimePrinter epochTimePrinter = new Joda.EpochTimePrinter(false); - epochTimePrinter.printTo(buffer, now, Locale.ROOT); - assertThat(buffer.length(), is(10)); - // only check the last digit, as the trailing digits of a unix timestamp keep counting past 59 rather than wrapping like clock seconds - assertThat(buffer.toString(), endsWith(String.valueOf(now.getSecondOfMinute() % 10))); - - buffer = new StringBuffer(); - Joda.EpochTimePrinter epochMilliSecondTimePrinter = new Joda.EpochTimePrinter(true); - epochMilliSecondTimePrinter.printTo(buffer, now, Locale.ROOT); - assertThat(buffer.length(), is(13)); - assertThat(buffer.toString(), endsWith(String.valueOf(now.getMillisOfSecond()))); - } - - public void testThatEpochParserIsIdempotent() { - DateFormatter formatter = DateFormatter.forPattern("epoch_millis"); - DateTime dateTime = formatter.parseJoda("1234567890123"); - assertThat(dateTime.getMillis(), is(1234567890123L)); - dateTime = formatter.parseJoda("1234567890456"); - assertThat(dateTime.getMillis(), is(1234567890456L)); - dateTime = formatter.parseJoda("1234567890789"); - assertThat(dateTime.getMillis(), is(1234567890789L)); - dateTime = formatter.parseJoda("1234567890123456789"); - assertThat(dateTime.getMillis(), is(1234567890123456789L)); - - DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second"); - DateTime secondsDateTime = secondsFormatter.parseJoda("1234567890"); - assertThat(secondsDateTime.getMillis(), is(1234567890000L)); - secondsDateTime = secondsFormatter.parseJoda("1234567890"); - assertThat(secondsDateTime.getMillis(), is(1234567890000L)); - secondsDateTime = secondsFormatter.parseJoda("1234567890"); - assertThat(secondsDateTime.getMillis(), is(1234567890000L)); - secondsDateTime = secondsFormatter.parseJoda("1234567890123456"); - assertThat(secondsDateTime.getMillis(), is(1234567890123456000L)); - } - - public void testThatDefaultFormatterChecksForCorrectYearLength() throws Exception { - // if no strict version is tested, this means the date format is already strict by itself - // yyyyMMdd - assertValidDateFormatParsing("basicDate", "20140303"); - assertDateFormatParsingThrowingException("basicDate", "2010303"); - - // yyyyMMdd'T'HHmmss.SSSZ - assertValidDateFormatParsing("basicDateTime", "20140303T124343.123Z"); - assertValidDateFormatParsing("basicDateTime", "00050303T124343.123Z"); - assertDateFormatParsingThrowingException("basicDateTime", "50303T124343.123Z"); - - // yyyyMMdd'T'HHmmssZ - assertValidDateFormatParsing("basicDateTimeNoMillis", "20140303T124343Z"); - assertValidDateFormatParsing("basicDateTimeNoMillis", "00050303T124343Z"); - assertDateFormatParsingThrowingException("basicDateTimeNoMillis", "50303T124343Z"); - - // yyyyDDD - assertValidDateFormatParsing("basicOrdinalDate", "0005165"); - assertDateFormatParsingThrowingException("basicOrdinalDate", "5165"); - - // yyyyDDD'T'HHmmss.SSSZ - assertValidDateFormatParsing("basicOrdinalDateTime", "0005165T124343.123Z"); - assertValidDateFormatParsing("basicOrdinalDateTime", "0005165T124343.123Z"); - assertDateFormatParsingThrowingException("basicOrdinalDateTime", "5165T124343.123Z"); - - // yyyyDDD'T'HHmmssZ - assertValidDateFormatParsing("basicOrdinalDateTimeNoMillis", "0005165T124343Z"); - assertValidDateFormatParsing("basicOrdinalDateTimeNoMillis", "0005165T124343Z"); - assertDateFormatParsingThrowingException("basicOrdinalDateTimeNoMillis", "5165T124343Z"); - - // HHmmss.SSSZ - assertValidDateFormatParsing("basicTime", "090909.123Z"); - assertDateFormatParsingThrowingException("basicTime", "90909.123Z"); - - // HHmmssZ - assertValidDateFormatParsing("basicTimeNoMillis", "090909Z"); - assertDateFormatParsingThrowingException("basicTimeNoMillis", "90909Z"); - - // 'T'HHmmss.SSSZ - assertValidDateFormatParsing("basicTTime", "T090909.123Z"); - 
assertDateFormatParsingThrowingException("basicTTime", "T90909.123Z"); - - // T’HHmmssZ - assertValidDateFormatParsing("basicTTimeNoMillis", "T090909Z"); - assertDateFormatParsingThrowingException("basicTTimeNoMillis", "T90909Z"); - - // xxxx’W'wwe - assertValidDateFormatParsing("basicWeekDate", "0005W414"); - assertValidDateFormatParsing("basicWeekDate", "5W414", "0005W414"); - assertDateFormatParsingThrowingException("basicWeekDate", "5W14"); - - assertValidDateFormatParsing("strictBasicWeekDate", "0005W414"); - assertDateFormatParsingThrowingException("strictBasicWeekDate", "0005W47"); - assertDateFormatParsingThrowingException("strictBasicWeekDate", "5W414"); - assertDateFormatParsingThrowingException("strictBasicWeekDate", "5W14"); - - // xxxx’W'wwe’T'HHmmss.SSSZ - assertValidDateFormatParsing("basicWeekDateTime", "0005W414T124343.123Z"); - assertValidDateFormatParsing("basicWeekDateTime", "5W414T124343.123Z", "0005W414T124343.123Z"); - assertDateFormatParsingThrowingException("basicWeekDateTime", "5W14T124343.123Z"); - - assertValidDateFormatParsing("strictBasicWeekDateTime", "0005W414T124343.123Z"); - assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "0005W47T124343.123Z"); - assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "5W414T124343.123Z"); - assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "5W14T124343.123Z"); - - // xxxx’W'wwe’T'HHmmssZ - assertValidDateFormatParsing("basicWeekDateTimeNoMillis", "0005W414T124343Z"); - assertValidDateFormatParsing("basicWeekDateTimeNoMillis", "5W414T124343Z", "0005W414T124343Z"); - assertDateFormatParsingThrowingException("basicWeekDateTimeNoMillis", "5W14T124343Z"); - - assertValidDateFormatParsing("strictBasicWeekDateTimeNoMillis", "0005W414T124343Z"); - assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "0005W47T124343Z"); - assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "5W414T124343Z"); - assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "5W14T124343Z"); - - // yyyy-MM-dd - assertValidDateFormatParsing("date", "0005-06-03"); - assertValidDateFormatParsing("date", "5-6-3", "0005-06-03"); - - assertValidDateFormatParsing("strictDate", "0005-06-03"); - assertDateFormatParsingThrowingException("strictDate", "5-6-3"); - assertDateFormatParsingThrowingException("strictDate", "0005-06-3"); - assertDateFormatParsingThrowingException("strictDate", "0005-6-03"); - assertDateFormatParsingThrowingException("strictDate", "5-06-03"); - - // yyyy-MM-dd'T'HH - assertValidDateFormatParsing("dateHour", "0005-06-03T12"); - assertValidDateFormatParsing("dateHour", "5-6-3T1", "0005-06-03T01"); - - assertValidDateFormatParsing("strictDateHour", "0005-06-03T12"); - assertDateFormatParsingThrowingException("strictDateHour", "5-6-3T1"); - - // yyyy-MM-dd'T'HH:mm - assertValidDateFormatParsing("dateHourMinute", "0005-06-03T12:12"); - assertValidDateFormatParsing("dateHourMinute", "5-6-3T12:1", "0005-06-03T12:01"); - - assertValidDateFormatParsing("strictDateHourMinute", "0005-06-03T12:12"); - assertDateFormatParsingThrowingException("strictDateHourMinute", "5-6-3T12:1"); - - // yyyy-MM-dd'T'HH:mm:ss - assertValidDateFormatParsing("dateHourMinuteSecond", "0005-06-03T12:12:12"); - assertValidDateFormatParsing("dateHourMinuteSecond", "5-6-3T12:12:1", "0005-06-03T12:12:01"); - - assertValidDateFormatParsing("strictDateHourMinuteSecond", "0005-06-03T12:12:12"); - assertDateFormatParsingThrowingException("strictDateHourMinuteSecond", 
"5-6-3T12:12:1"); - - // yyyy-MM-dd’T'HH:mm:ss.SSS - assertValidDateFormatParsing("dateHourMinuteSecondFraction", "0005-06-03T12:12:12.123"); - assertValidDateFormatParsing("dateHourMinuteSecondFraction", "5-6-3T12:12:1.123", "0005-06-03T12:12:01.123"); - assertValidDateFormatParsing("dateHourMinuteSecondFraction", "5-6-3T12:12:1.1", "0005-06-03T12:12:01.100"); - - assertValidDateFormatParsing("strictDateHourMinuteSecondFraction", "0005-06-03T12:12:12.123"); - assertDateFormatParsingThrowingException("strictDateHourMinuteSecondFraction", "5-6-3T12:12:12.1"); - assertDateFormatParsingThrowingException("strictDateHourMinuteSecondFraction", "5-6-3T12:12:12.12"); - - assertValidDateFormatParsing("dateHourMinuteSecondMillis", "0005-06-03T12:12:12.123"); - assertValidDateFormatParsing("dateHourMinuteSecondMillis", "5-6-3T12:12:1.123", "0005-06-03T12:12:01.123"); - assertValidDateFormatParsing("dateHourMinuteSecondMillis", "5-6-3T12:12:1.1", "0005-06-03T12:12:01.100"); - - assertValidDateFormatParsing("strictDateHourMinuteSecondMillis", "0005-06-03T12:12:12.123"); - assertDateFormatParsingThrowingException("strictDateHourMinuteSecondMillis", "5-6-3T12:12:12.1"); - assertDateFormatParsingThrowingException("strictDateHourMinuteSecondMillis", "5-6-3T12:12:12.12"); - - // yyyy-MM-dd'T'HH:mm:ss.SSSZ - assertValidDateFormatParsing("dateOptionalTime", "2014-03-03", "2014-03-03T00:00:00.000Z"); - assertValidDateFormatParsing("dateOptionalTime", "1257-3-03", "1257-03-03T00:00:00.000Z"); - assertValidDateFormatParsing("dateOptionalTime", "0005-03-3", "0005-03-03T00:00:00.000Z"); - assertValidDateFormatParsing("dateOptionalTime", "5-03-03", "0005-03-03T00:00:00.000Z"); - assertValidDateFormatParsing("dateOptionalTime", "5-03-03T1:1:1.1", "0005-03-03T01:01:01.100Z"); - assertValidDateFormatParsing("strictDateOptionalTime", "2014-03-03", "2014-03-03T00:00:00.000Z"); - assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03"); - assertDateFormatParsingThrowingException("strictDateOptionalTime", "0005-3-03"); - assertDateFormatParsingThrowingException("strictDateOptionalTime", "0005-03-3"); - assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T1:1:1.1"); - assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:01:01.1"); - assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:01:1.100"); - assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:1:01.100"); - assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T1:01:01.100"); - - // yyyy-MM-dd’T'HH:mm:ss.SSSZZ - assertValidDateFormatParsing("dateTime", "5-03-03T1:1:1.1Z", "0005-03-03T01:01:01.100Z"); - assertValidDateFormatParsing("strictDateTime", "2014-03-03T11:11:11.100Z", "2014-03-03T11:11:11.100Z"); - assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T1:1:1.1Z"); - assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T01:01:1.100Z"); - assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T01:1:01.100Z"); - assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T1:01:01.100Z"); - - // yyyy-MM-dd’T'HH:mm:ssZZ - assertValidDateFormatParsing("dateTimeNoMillis", "5-03-03T1:1:1Z", "0005-03-03T01:01:01Z"); - assertValidDateFormatParsing("strictDateTimeNoMillis", "2014-03-03T11:11:11Z", "2014-03-03T11:11:11Z"); - assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T1:1:1Z"); - 
assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T01:01:1Z"); - assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T01:1:01Z"); - assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T1:01:01Z"); - - // HH - assertValidDateFormatParsing("hour", "12"); - assertValidDateFormatParsing("hour", "1", "01"); - assertValidDateFormatParsing("strictHour", "12"); - assertValidDateFormatParsing("strictHour", "01"); - assertDateFormatParsingThrowingException("strictHour", "1"); - - // HH:mm - assertValidDateFormatParsing("hourMinute", "12:12"); - assertValidDateFormatParsing("hourMinute", "12:1", "12:01"); - assertValidDateFormatParsing("strictHourMinute", "12:12"); - assertValidDateFormatParsing("strictHourMinute", "12:01"); - assertDateFormatParsingThrowingException("strictHourMinute", "12:1"); - - // HH:mm:ss - assertValidDateFormatParsing("hourMinuteSecond", "12:12:12"); - assertValidDateFormatParsing("hourMinuteSecond", "12:12:1", "12:12:01"); - assertValidDateFormatParsing("strictHourMinuteSecond", "12:12:12"); - assertValidDateFormatParsing("strictHourMinuteSecond", "12:12:01"); - assertDateFormatParsingThrowingException("strictHourMinuteSecond", "12:12:1"); - - // HH:mm:ss.SSS - assertValidDateFormatParsing("hourMinuteSecondFraction", "12:12:12.123"); - assertValidDateFormatParsing("hourMinuteSecondFraction", "12:12:12.1", "12:12:12.100"); - assertValidDateFormatParsing("strictHourMinuteSecondFraction", "12:12:12.123"); - assertValidDateFormatParsing("strictHourMinuteSecondFraction", "12:12:12.1", "12:12:12.100"); - - assertValidDateFormatParsing("hourMinuteSecondMillis", "12:12:12.123"); - assertValidDateFormatParsing("hourMinuteSecondMillis", "12:12:12.1", "12:12:12.100"); - assertValidDateFormatParsing("strictHourMinuteSecondMillis", "12:12:12.123"); - assertValidDateFormatParsing("strictHourMinuteSecondMillis", "12:12:12.1", "12:12:12.100"); - - // yyyy-DDD - assertValidDateFormatParsing("ordinalDate", "5-3", "0005-003"); - assertValidDateFormatParsing("strictOrdinalDate", "0005-003"); - assertDateFormatParsingThrowingException("strictOrdinalDate", "5-3"); - assertDateFormatParsingThrowingException("strictOrdinalDate", "0005-3"); - assertDateFormatParsingThrowingException("strictOrdinalDate", "5-003"); - - // yyyy-DDD’T'HH:mm:ss.SSSZZ - assertValidDateFormatParsing("ordinalDateTime", "5-3T12:12:12.100Z", "0005-003T12:12:12.100Z"); - assertValidDateFormatParsing("strictOrdinalDateTime", "0005-003T12:12:12.100Z"); - assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T1:12:12.123Z"); - assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T12:1:12.123Z"); - assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T12:12:1.123Z"); - - // yyyy-DDD’T'HH:mm:ssZZ - assertValidDateFormatParsing("ordinalDateTimeNoMillis", "5-3T12:12:12Z", "0005-003T12:12:12Z"); - assertValidDateFormatParsing("strictOrdinalDateTimeNoMillis", "0005-003T12:12:12Z"); - assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T1:12:12Z"); - assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T12:1:12Z"); - assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T12:12:1Z"); - - - // HH:mm:ss.SSSZZ - assertValidDateFormatParsing("time", "12:12:12.100Z"); - assertValidDateFormatParsing("time", "01:01:01.1Z", "01:01:01.100Z"); - assertValidDateFormatParsing("time", "1:1:1.1Z", "01:01:01.100Z"); - 
assertValidDateFormatParsing("strictTime", "12:12:12.100Z"); - assertDateFormatParsingThrowingException("strictTime", "12:12:1.100Z"); - assertDateFormatParsingThrowingException("strictTime", "12:1:12.100Z"); - assertDateFormatParsingThrowingException("strictTime", "1:12:12.100Z"); - - // HH:mm:ssZZ - assertValidDateFormatParsing("timeNoMillis", "12:12:12Z"); - assertValidDateFormatParsing("timeNoMillis", "01:01:01Z", "01:01:01Z"); - assertValidDateFormatParsing("timeNoMillis", "1:1:1Z", "01:01:01Z"); - assertValidDateFormatParsing("strictTimeNoMillis", "12:12:12Z"); - assertDateFormatParsingThrowingException("strictTimeNoMillis", "12:12:1Z"); - assertDateFormatParsingThrowingException("strictTimeNoMillis", "12:1:12Z"); - assertDateFormatParsingThrowingException("strictTimeNoMillis", "1:12:12Z"); - - // 'T’HH:mm:ss.SSSZZ - assertValidDateFormatParsing("tTime", "T12:12:12.100Z"); - assertValidDateFormatParsing("tTime", "T01:01:01.1Z", "T01:01:01.100Z"); - assertValidDateFormatParsing("tTime", "T1:1:1.1Z", "T01:01:01.100Z"); - assertValidDateFormatParsing("strictTTime", "T12:12:12.100Z"); - assertDateFormatParsingThrowingException("strictTTime", "T12:12:1.100Z"); - assertDateFormatParsingThrowingException("strictTTime", "T12:1:12.100Z"); - assertDateFormatParsingThrowingException("strictTTime", "T1:12:12.100Z"); - - // 'T’HH:mm:ssZZ - assertValidDateFormatParsing("tTimeNoMillis", "T12:12:12Z"); - assertValidDateFormatParsing("tTimeNoMillis", "T01:01:01Z", "T01:01:01Z"); - assertValidDateFormatParsing("tTimeNoMillis", "T1:1:1Z", "T01:01:01Z"); - assertValidDateFormatParsing("strictTTimeNoMillis", "T12:12:12Z"); - assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T12:12:1Z"); - assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T12:1:12Z"); - assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T1:12:12Z"); - - // xxxx-'W’ww-e - assertValidDateFormatParsing("weekDate", "0005-W4-1", "0005-W04-1"); - assertValidDateFormatParsing("strictWeekDate", "0005-W04-1"); - assertDateFormatParsingThrowingException("strictWeekDate", "0005-W4-1"); - - // xxxx-'W’ww-e’T'HH:mm:ss.SSSZZ - assertValidDateFormatParsing("weekDateTime", "0005-W41-4T12:43:43.123Z"); - assertValidDateFormatParsing("weekDateTime", "5-W41-4T12:43:43.123Z", "0005-W41-4T12:43:43.123Z"); - assertValidDateFormatParsing("strictWeekDateTime", "0005-W41-4T12:43:43.123Z"); - assertValidDateFormatParsing("strictWeekDateTime", "0005-W06-4T12:43:43.123Z"); - assertDateFormatParsingThrowingException("strictWeekDateTime", "0005-W4-7T12:43:43.123Z"); - assertDateFormatParsingThrowingException("strictWeekDateTime", "5-W41-4T12:43:43.123Z"); - assertDateFormatParsingThrowingException("strictWeekDateTime", "5-W1-4T12:43:43.123Z"); - - // xxxx-'W’ww-e’T'HH:mm:ssZZ - assertValidDateFormatParsing("weekDateTimeNoMillis", "0005-W41-4T12:43:43Z"); - assertValidDateFormatParsing("weekDateTimeNoMillis", "5-W41-4T12:43:43Z", "0005-W41-4T12:43:43Z"); - assertValidDateFormatParsing("strictWeekDateTimeNoMillis", "0005-W41-4T12:43:43Z"); - assertValidDateFormatParsing("strictWeekDateTimeNoMillis", "0005-W06-4T12:43:43Z"); - assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "0005-W4-7T12:43:43Z"); - assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "5-W41-4T12:43:43Z"); - assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "5-W1-4T12:43:43Z"); - - // yyyy - assertValidDateFormatParsing("weekyear", "2014"); - assertValidDateFormatParsing("weekyear", "5", "0005"); - 
assertValidDateFormatParsing("weekyear", "0005"); - assertValidDateFormatParsing("strictWeekyear", "2014"); - assertValidDateFormatParsing("strictWeekyear", "0005"); - assertDateFormatParsingThrowingException("strictWeekyear", "5"); - - // yyyy-'W'ee - assertValidDateFormatParsing("weekyearWeek", "2014-W41"); - assertValidDateFormatParsing("weekyearWeek", "2014-W1", "2014-W01"); - assertValidDateFormatParsing("strictWeekyearWeek", "2014-W41"); - assertDateFormatParsingThrowingException("strictWeekyearWeek", "2014-W1"); - - // weekyearWeekDay - assertValidDateFormatParsing("weekyearWeekDay", "2014-W41-1"); - assertValidDateFormatParsing("weekyearWeekDay", "2014-W1-1", "2014-W01-1"); - assertValidDateFormatParsing("strictWeekyearWeekDay", "2014-W41-1"); - assertDateFormatParsingThrowingException("strictWeekyearWeekDay", "2014-W1-1"); - - // yyyy - assertValidDateFormatParsing("year", "2014"); - assertValidDateFormatParsing("year", "5", "0005"); - assertValidDateFormatParsing("strictYear", "2014"); - assertDateFormatParsingThrowingException("strictYear", "5"); - - // yyyy-mm - assertValidDateFormatParsing("yearMonth", "2014-12"); - assertValidDateFormatParsing("yearMonth", "2014-5", "2014-05"); - assertValidDateFormatParsing("strictYearMonth", "2014-12"); - assertDateFormatParsingThrowingException("strictYearMonth", "2014-5"); - - // yyyy-mm-dd - assertValidDateFormatParsing("yearMonthDay", "2014-12-12"); - assertValidDateFormatParsing("yearMonthDay", "2014-05-5", "2014-05-05"); - assertValidDateFormatParsing("strictYearMonthDay", "2014-12-12"); - assertDateFormatParsingThrowingException("strictYearMonthDay", "2014-05-5"); - } - - public void testThatRootObjectParsingIsStrict() throws Exception { - String[] datesThatWork = new String[] { "2014/10/10", "2014/10/10 12:12:12", "2014-05-05", "2014-05-05T12:12:12.123Z" }; - String[] datesThatShouldNotWork = new String[]{ "5-05-05", "2014-5-05", "2014-05-5", - "2014-05-05T1:12:12.123Z", "2014-05-05T12:1:12.123Z", "2014-05-05T12:12:1.123Z", - "4/10/10", "2014/1/10", "2014/10/1", - "2014/10/10 1:12:12", "2014/10/10 12:1:12", "2014/10/10 12:12:1" - }; - - // good case - for (String date : datesThatWork) { - boolean dateParsingSuccessful = false; - for (DateFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) { - try { - dateTimeFormatter.parseMillis(date); - dateParsingSuccessful = true; - break; - } catch (Exception e) {} - } - if (!dateParsingSuccessful) { - fail("Parsing for date " + date + " in root object mapper failed, but shouldnt"); - } - } - - // bad case - for (String date : datesThatShouldNotWork) { - for (DateFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) { - try { - dateTimeFormatter.parseMillis(date); - fail(String.format(Locale.ROOT, "Expected exception when parsing date %s in root mapper", date)); - } catch (Exception e) {} - } - } - } - - public void testDeprecatedFormatSpecifiers() { - Joda.forPattern("CC"); - assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" + - " next major version of Elasticsearch."); - Joda.forPattern("YYYY"); - assertWarnings("Use of 'Y' (year-of-era) will change to 'y' in the" + - " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); - Joda.forPattern("xxxx"); - assertWarnings("Use of 'x' (week-based-year) will change" + - " to 'Y' in the next major version of Elasticsearch. 
- public void testDeprecatedFormatSpecifiers() { - Joda.forPattern("CC"); - assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" + - " next major version of Elasticsearch."); - Joda.forPattern("YYYY"); - assertWarnings("Use of 'Y' (year-of-era) will change to 'y' in the" + - " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); - Joda.forPattern("xxxx"); - assertWarnings("Use of 'x' (week-based-year) will change" + - " to 'Y' in the next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); - // multiple deprecations - Joda.forPattern("CC-YYYY"); - assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" + - " next major version of Elasticsearch.", "Use of 'Y' (year-of-era) will change to 'y' in the" + - " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier."); - } - - public void testDeprecatedEpochScientificNotation() { - assertValidDateFormatParsing("epoch_second", "1.234e5", "123400"); - assertWarnings("Use of scientific notation" + - " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch."); - assertValidDateFormatParsing("epoch_millis", "1.234e5", "123400"); - assertWarnings("Use of scientific notation" + - " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch."); - } - - public void testDeprecatedEpochNegative() { - assertValidDateFormatParsing("epoch_second", "-12345", "-12345"); - assertWarnings("Use of negative values" + - " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch."); - assertValidDateFormatParsing("epoch_millis", "-12345", "-12345"); - assertWarnings("Use of negative values" + - " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch."); - } - - private void assertValidDateFormatParsing(String pattern, String dateToParse) { - assertValidDateFormatParsing(pattern, dateToParse, dateToParse); - } - - private void assertValidDateFormatParsing(String pattern, String dateToParse, String expectedDate) { - DateFormatter formatter = DateFormatter.forPattern(pattern); - assertThat(formatter.formatMillis(formatter.parseMillis(dateToParse)), is(expectedDate)); - } - - private void assertDateFormatParsingThrowingException(String pattern, String invalidDate) { - try { - DateFormatter formatter = DateFormatter.forPattern(pattern); - formatter.parseMillis(invalidDate); - fail(String.format(Locale.ROOT, "Expected parsing exception for pattern [%s] with date [%s], but did not happen", - pattern, invalidDate)); - } catch (IllegalArgumentException e) { - } - } - - private long utcTimeInMillis(String time) { - return ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC).parseMillis(time); - } - -} diff --git a/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java b/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java index 7e3dbdd5b94df..3ee4ce0e7d7bf 100644 --- a/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java +++ b/server/src/test/java/org/elasticsearch/common/rounding/RoundingDuelTests.java @@ -19,9 +19,11 @@ package org.elasticsearch.common.rounding; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import java.time.ZoneOffset; @@ -42,6 +44,7 @@ public void testSerialization() throws Exception { rounding = org.elasticsearch.common.Rounding.builder(timeValue()).timeZone(ZoneOffset.UTC).build(); } BytesStreamOutput output = new BytesStreamOutput(); + output.setVersion(VersionUtils.getPreviousVersion(Version.V_7_0_0)); rounding.writeTo(output); Rounding roundingJoda = Rounding.Streams.read(output.bytes().streamInput());
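The DateFormattersTests hunks below all assert one new failure contract: java.time parse errors surface as IllegalArgumentException carrying a uniform message, rather than a raw DateTimeParseException. The expected shape, condensed from the assertions that follow:

    DateFormatter formatter = DateFormatters.forPattern("epoch_millis");
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> formatter.parse("invalid"));
    assertThat(e.getMessage(),
        containsString("failed to parse date field [invalid] with format [epoch_millis]"));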
diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java index feb406c61c966..96ef39e430178 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java @@ -25,7 +25,6 @@ import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeParseException; import java.time.temporal.ChronoField; import java.time.temporal.TemporalAccessor; import java.util.Locale; @@ -57,13 +56,13 @@ public void testEpochMillisParser() { } } - public void testEpochMilliParser() { + public void testInvalidEpochMilliParser() { DateFormatter formatter = DateFormatters.forPattern("epoch_millis"); - DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("invalid")); - assertThat(e.getMessage(), containsString("could not be parsed")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("invalid")); + assertThat(e.getMessage(), containsString("failed to parse date field [invalid] with format [epoch_millis]")); - e = expectThrows(DateTimeParseException.class, () -> formatter.parse("123.1234567")); - assertThat(e.getMessage(), containsString("unparsed text found at index 3")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("123.1234567")); + assertThat(e.getMessage(), containsString("failed to parse date field [123.1234567] with format [epoch_millis]")); } // this is not in the duelling tests, because the epoch second parser in joda time drops the milliseconds after the comma @@ -72,14 +71,14 @@ public void testEpochMilliParser() { public void testEpochSecondParser() { DateFormatter formatter = DateFormatters.forPattern("epoch_second"); - DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.1")); - assertThat(e.getMessage(), is("Text '1234.1' could not be parsed, unparsed text found at index 4")); - e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.")); - assertThat(e.getMessage(), is("Text '1234.' 
could not be parsed, unparsed text found at index 4")); - e = expectThrows(DateTimeParseException.class, () -> formatter.parse("abc")); - assertThat(e.getMessage(), is("Text 'abc' could not be parsed, unparsed text found at index 0")); - e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.abc")); - assertThat(e.getMessage(), is("Text '1234.abc' could not be parsed, unparsed text found at index 4")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.1234567890")); + assertThat(e.getMessage(), is("failed to parse date field [1234.1234567890] with format [epoch_second]")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.123456789013221")); + assertThat(e.getMessage(), containsString("[1234.123456789013221]")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("abc")); + assertThat(e.getMessage(), containsString("[abc]")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.abc")); + assertThat(e.getMessage(), containsString("[1234.abc]")); } public void testEpochMilliParsersWithDifferentFormatters() { @@ -139,7 +138,7 @@ public void testEqualsAndHashcode() { assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis"))); } - public void testForceJava8() { + public void testSupportBackwardsJava8Format() { assertThat(DateFormatter.forPattern("8yyyy-MM-dd"), instanceOf(JavaDateFormatter.class)); // named formats too assertThat(DateFormatter.forPattern("8date_optional_time"), instanceOf(JavaDateFormatter.class)); @@ -161,27 +160,29 @@ public void testParsingStrictNanoDates() { } public void testRoundupFormatterWithEpochDates() { - assertRoundupFormatter("8epoch_millis", "1234567890", 1234567890L); + assertRoundupFormatter("epoch_millis", "1234567890", 1234567890L); // also check nanos of the epoch_millis formatter if it is rounded up to the nano second DateTimeFormatter roundUpFormatter = ((JavaDateFormatter) DateFormatter.forPattern("8epoch_millis")).getRoundupParser(); Instant epochMilliInstant = DateFormatters.toZonedDateTime(roundUpFormatter.parse("1234567890")).toInstant(); assertThat(epochMilliInstant.getLong(ChronoField.NANO_OF_SECOND), is(890_999_999L)); - assertRoundupFormatter("8strict_date_optional_time||epoch_millis", "2018-10-10T12:13:14.123Z", 1539173594123L); - assertRoundupFormatter("8strict_date_optional_time||epoch_millis", "1234567890", 1234567890L); - assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "2018-10-10T12:13:14.123", 1539173594123L); - assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "1234567890", 1234567890L); + assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2018-10-10T12:13:14.123Z", 1539173594123L); + assertRoundupFormatter("strict_date_optional_time||epoch_millis", "1234567890", 1234567890L); + assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2018-10-10", 1539215999999L); + assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "2018-10-10T12:13:14.123", 1539173594123L); + assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "1234567890", 1234567890L); - assertRoundupFormatter("8epoch_second", "1234567890", 1234567890999L); + assertRoundupFormatter("epoch_second", "1234567890", 1234567890999L); // also check nanos of the epoch_second formatter if it is rounded up to the nano second DateTimeFormatter epochSecondRoundupParser = ((JavaDateFormatter) 
DateFormatter.forPattern("8epoch_second")).getRoundupParser(); Instant epochSecondInstant = DateFormatters.toZonedDateTime(epochSecondRoundupParser.parse("1234567890")).toInstant(); assertThat(epochSecondInstant.getLong(ChronoField.NANO_OF_SECOND), is(999_999_999L)); - assertRoundupFormatter("8strict_date_optional_time||epoch_second", "2018-10-10T12:13:14.123Z", 1539173594123L); - assertRoundupFormatter("8strict_date_optional_time||epoch_second", "1234567890", 1234567890999L); - assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "2018-10-10T12:13:14.123", 1539173594123L); - assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "1234567890", 1234567890999L); + assertRoundupFormatter("strict_date_optional_time||epoch_second", "2018-10-10T12:13:14.123Z", 1539173594123L); + assertRoundupFormatter("strict_date_optional_time||epoch_second", "1234567890", 1234567890999L); + assertRoundupFormatter("strict_date_optional_time||epoch_second", "2018-10-10", 1539215999999L); + assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "2018-10-10T12:13:14.123", 1539173594123L); + assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "1234567890", 1234567890999L); } private void assertRoundupFormatter(String format, String input, long expectedMilliSeconds) { @@ -194,8 +195,8 @@ private void assertRoundupFormatter(String format, String input, long expectedMi public void testRoundupFormatterZone() { ZoneId zoneId = randomZone(); - String format = randomFrom("8epoch_second", "8epoch_millis", "8strict_date_optional_time", "8uuuu-MM-dd'T'HH:mm:ss.SSS", - "8strict_date_optional_time||date_optional_time"); + String format = randomFrom("epoch_second", "epoch_millis", "strict_date_optional_time", "uuuu-MM-dd'T'HH:mm:ss.SSS", + "strict_date_optional_time||date_optional_time"); JavaDateFormatter formatter = (JavaDateFormatter) DateFormatter.forPattern(format).withZone(zoneId); DateTimeFormatter roundUpFormatter = formatter.getRoundupParser(); assertThat(roundUpFormatter.getZone(), is(zoneId)); @@ -204,8 +205,8 @@ public void testRoundupFormatterZone() { public void testRoundupFormatterLocale() { Locale locale = randomLocale(random()); - String format = randomFrom("8epoch_second", "8epoch_millis", "8strict_date_optional_time", "8uuuu-MM-dd'T'HH:mm:ss.SSS", - "8strict_date_optional_time||date_optional_time"); + String format = randomFrom("epoch_second", "epoch_millis", "strict_date_optional_time", "uuuu-MM-dd'T'HH:mm:ss.SSS", + "strict_date_optional_time||date_optional_time"); JavaDateFormatter formatter = (JavaDateFormatter) DateFormatter.forPattern(format).withLocale(locale); DateTimeFormatter roundupParser = formatter.getRoundupParser(); assertThat(roundupParser.getLocale(), is(locale)); diff --git a/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java index 8d702ebee8388..2b8d89bc68bae 100644 --- a/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java @@ -39,8 +39,6 @@ public class JavaDateMathParserTests extends ESTestCase { private final DateMathParser parser = formatter.toDateMathParser(); public void testBasicDates() { - assertDateMathEquals("2014", "2014-01-01T00:00:00.000"); - assertDateMathEquals("2014-05", "2014-05-01T00:00:00.000"); assertDateMathEquals("2014-05-30", "2014-05-30T00:00:00.000"); assertDateMathEquals("2014-05-30T20", 
"2014-05-30T20:00:00.000"); assertDateMathEquals("2014-05-30T20:21", "2014-05-30T20:21:00.000"); @@ -125,7 +123,7 @@ public void testMultipleAdjustments() { } public void testNow() { - final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null); + final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null).toEpochMilli(); assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null); assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null); @@ -142,11 +140,11 @@ public void testRoundingPreservesEpochAsBaseDate() { DateMathParser parser = formatter.toDateMathParser(); ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")); assertThat(zonedDateTime.getYear(), is(1970)); - long millisStart = zonedDateTime.toInstant().toEpochMilli(); + Instant millisStart = zonedDateTime.toInstant(); assertEquals(millisStart, parser.parse("04:52:20", () -> 0, false, (ZoneId) null)); // due to rounding up, we have to add the number of milliseconds here manually long millisEnd = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")).toInstant().toEpochMilli() + 999; - assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null)); + assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null).toEpochMilli()); } // Implicit rounding happening when parts of the date are not specified @@ -166,9 +164,10 @@ public void testImplicitRounding() { // implicit rounding with explicit timezone in the date format DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-ddXXX"); DateMathParser parser = formatter.toDateMathParser(); - long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); + Instant time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time); - time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null); + time = DateFormatter.forPattern("strict_date_optional_time_nanos").toDateMathParser() + .parse("2011-10-09T23:59:59.999+01:00", () -> 0, false, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time); } @@ -176,7 +175,6 @@ public void testImplicitRounding() { public void testExplicitRounding() { assertDateMathEquals("2014-11-18||/y", "2014-01-01", 0, false, null); assertDateMathEquals("2014-11-18||/y", "2014-12-31T23:59:59.999", 0, true, null); - assertDateMathEquals("2014||/y", "2014-01-01", 0, false, null); assertDateMathEquals("2014-01-01T00:00:00.001||/y", "2014-12-31T23:59:59.999", 0, true, null); // rounding should also take into account time zone assertDateMathEquals("2014-11-18||/y", "2013-12-31T23:00:00.000Z", 0, false, ZoneId.of("CET")); @@ -239,16 +237,16 @@ public void testTimestamps() { assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); // also check other time units - DateMathParser parser = DateFormatter.forPattern("8epoch_second||dateOptionalTime").toDateMathParser(); - long datetime = parser.parse("1418248078", () -> 0); + DateMathParser parser = DateFormatter.forPattern("epoch_second||dateOptionalTime").toDateMathParser(); + long datetime = parser.parse("1418248078", () -> 0).toEpochMilli(); assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000"); // a timestamp before 10000 is a year - assertDateMathEquals("9999", "9999-01-01T00:00:00.000"); + assertDateMathEquals("9999", "1970-01-01T00:00:09.999Z"); // 10000 is also a year, breaking bwc, used to be a 
timestamp - assertDateMathEquals("10000", "10000-01-01T00:00:00.000"); + assertDateMathEquals("10000", "1970-01-01T00:00:10.000Z"); // but 10000 with T is still a date format - assertDateMathEquals("10000T", "10000-01-01T00:00:00.000"); + assertDateMathEquals("10000-01-01T", "10000-01-01T00:00:00.000"); } void assertParseException(String msg, String date, String exc) { @@ -266,7 +264,7 @@ public void testIllegalMathFormat() { public void testIllegalDateFormat() { assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "failed to parse date field"); - assertParseException("Expected bad date format exception", "123bogus", "Unrecognized chars at the end of [123bogus]"); + assertParseException("Expected bad date format exception", "123bogus", "failed to parse date field [123bogus]"); } public void testOnlyCallsNowIfNecessary() { @@ -286,12 +284,12 @@ private void assertDateMathEquals(String toTest, String expected) { } private void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, ZoneId timeZone) { - long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone); + long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone).toEpochMilli(); assertDateEquals(gotMillis, toTest, expected); } private void assertDateEquals(long gotMillis, String original, String expected) { - long expectedMillis = parser.parse(expected, () -> 0); + long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli(); if (gotMillis != expectedMillis) { ZonedDateTime zonedDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(gotMillis), ZoneOffset.UTC); fail("Date math not equal\n" + diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 8b437d25a8495..38b3d5a2f1ff2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -21,21 +21,23 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; import org.junit.Before; import java.io.IOException; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Collection; import static org.hamcrest.Matchers.containsString; @@ -173,7 +175,8 @@ public void testIgnoreMalformed() throws Exception { .endObject()), XContentType.JSON)); MapperParsingException e = expectThrows(MapperParsingException.class, runnable); - assertThat(e.getCause().getMessage(), containsString("Cannot parse \"2016-03-99\"")); + assertThat(e.getCause().getMessage(), + containsString("failed to parse date field [2016-03-99] with format [strict_date_optional_time||epoch_millis]"));
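The day value 99 is rejected on the java.time path because field validation refuses an out-of-range day-of-month before any date is built; a JDK-only illustration (the pattern is a stand-in for the date part of the default format named above):

    // Day-of-month 99 exceeds the 1-31 range, so this throws DateTimeParseException.
    java.time.format.DateTimeFormatter f = java.time.format.DateTimeFormatter
        .ofPattern("uuuu-MM-dd")
        .withResolverStyle(java.time.format.ResolverStyle.STRICT);
    f.parse("2016-03-99");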
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "date") @@ -217,36 +220,13 @@ public void testChangeFormat() throws IOException { assertEquals(1457654400000L, pointField.numericValue().longValue()); } - public void testFloatEpochFormat() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "date") - .field("format", "epoch_millis").endObject().endObject() - .endObject().endObject()); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - assertEquals(mapping, mapper.mappingSource().toString()); - - long epochMillis = randomNonNegativeLong(); - String epochFloatValue = epochMillis + "." + randomIntBetween(0, 999); - - ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", epochFloatValue) - .endObject()), - XContentType.JSON)); - - IndexableField[] fields = doc.rootDoc().getFields("field"); - assertEquals(2, fields.length); - IndexableField pointField = fields[0]; - assertEquals(epochMillis, pointField.numericValue().longValue()); - } - public void testChangeLocale() throws IOException { + assumeTrue("need java 9 for testing", JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0); String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "date").field("locale", "fr").endObject().endObject() - .endObject().endObject()); + .startObject("properties").startObject("field").field("type", "date") + .field("format", "E, d MMM yyyy HH:mm:ss Z") + .field("locale", "de") + .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -255,7 +235,7 @@ public void testChangeLocale() throws IOException { mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() - .field("field", 1457654400) + .field("field", "Mi., 06 Dez. 2000 02:55:00 -0800") .endObject()), XContentType.JSON)); } @@ -340,12 +320,8 @@ public void testEmptyName() throws IOException { assertThat(e.getMessage(), containsString("name cannot be empty string")); } - /** - * Test that time zones are correctly parsed by the {@link DateFieldMapper}. - * There is a known bug with Joda 2.9.4 reported in https://github.com/JodaOrg/joda-time/issues/373. - */ public void testTimeZoneParsing() throws Exception { - final String timeZonePattern = "yyyy-MM-dd" + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'"); + final String timeZonePattern = "yyyy-MM-dd" + randomFrom("XXX", "[XXX]", "'['XXX']'"); String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") .startObject("field") .field("type", "date") .field("format", timeZonePattern) .endObject() .endObject() .endObject() .endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); assertEquals(mapping, mapper.mappingSource().toString()); - final DateTimeZone randomTimeZone = randomBoolean() ? DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone(); - final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone); + DateFormatter formatter = DateFormatter.forPattern(timeZonePattern); + final ZoneId randomTimeZone = randomBoolean() ? 
ZoneId.of(randomFrom("UTC", "CET")) : randomZone(); + final ZonedDateTime randomDate = ZonedDateTime.of(2016, 3, 11, 0, 0, 0, 0, randomTimeZone); ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference .bytes(XContentFactory.jsonBuilder() .startObject() - .field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate)) + .field("field", formatter.format(randomDate)) .endObject()), XContentType.JSON)); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(2, fields.length); - assertEquals(randomDate.withZone(DateTimeZone.UTC).getMillis(), fields[0].numericValue().longValue()); + long millis = randomDate.withZoneSameInstant(ZoneOffset.UTC).toInstant().toEpochMilli(); + assertEquals(millis, fields[0].numericValue().longValue()); } public void testMergeDate() throws IOException { @@ -429,6 +407,6 @@ public void testIllegalFormatField() throws Exception { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping))); - assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage()); + assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 072170aff09dd..d4058d50f74a2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.IndexSettings; @@ -45,6 +46,7 @@ import org.junit.Before; import java.io.IOException; +import java.time.ZoneOffset; import java.util.Locale; public class DateFieldTypeTests extends FieldTypeTestCase { @@ -67,7 +69,7 @@ public void modify(MappedFieldType ft) { addModifier(new Modifier("locale", false) { @Override public void modify(MappedFieldType ft) { - ((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("date_optional_time").withLocale(Locale.CANADA)); + ((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("strict_date_optional_time").withLocale(Locale.CANADA)); } }); nowInMillis = randomNonNegativeLong(); @@ -110,8 +112,10 @@ private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, public void testIsFieldWithinQuery() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); - long instant1 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-12").getMillis(); - long instant2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2016-04-03").getMillis(); + long instant1 = + DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12")).toInstant().toEpochMilli(); + long instant2 = + DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2016-04-03")).toInstant().toEpochMilli(); Document doc = new Document(); LongPoint field = new LongPoint("my_date", instant1); doc.add(field); @@ -138,25 +142,27 @@ public void testIsFieldWithinQuery() throws 
IOException { public void testValueFormat() { MappedFieldType ft = createDefaultFieldType(); - long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-12T14:10:55").getMillis(); + long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12T14:10:55")) + .toInstant().toEpochMilli(); + assertEquals("2015-10-12T14:10:55.000Z", - ft.docValueFormat(null, DateTimeZone.UTC).format(instant)); + ft.docValueFormat(null, ZoneOffset.UTC).format(instant)); assertEquals("2015-10-12T15:10:55.000+01:00", - ft.docValueFormat(null, DateTimeZone.forOffsetHours(1)).format(instant)); + ft.docValueFormat(null, ZoneOffset.ofHours(1)).format(instant)); assertEquals("2015", - createDefaultFieldType().docValueFormat("yyyy", DateTimeZone.UTC).format(instant)); + createDefaultFieldType().docValueFormat("YYYY", ZoneOffset.UTC).format(instant)); assertEquals(instant, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null)); + ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", false, null)); assertEquals(instant + 999, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", true, null)); - assertEquals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-13").getMillis() - 1, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12||/d", true, null)); + ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", true, null)); + long i = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-13")).toInstant().toEpochMilli(); + assertEquals(i - 1, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12||/d", true, null)); } public void testValueForSearch() { MappedFieldType ft = createDefaultFieldType(); String date = "2015-10-12T12:09:55.000Z"; - long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date).getMillis(); + long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); assertEquals(date, ft.valueForDisplay(instant)); } @@ -170,7 +176,7 @@ public void testTermQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date = "2015-10-12T14:10:55"; - long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date).getMillis(); + long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli(); ft.setIndexOptions(IndexOptions.DOCS); Query expected = new IndexOrDocValuesQuery( LongPoint.newRangeQuery("field", instant, instant + 999), @@ -193,8 +199,9 @@ public void testRangeQuery() throws IOException { ft.setName("field"); String date1 = "2015-10-12T14:10:55"; String date2 = "2016-04-28T11:33:52"; - long instant1 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date1).getMillis(); - long instant2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date2).getMillis() + 999; + long instant1 = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli(); + long instant2 = + DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli() + 999; ft.setIndexOptions(IndexOptions.DOCS); Query expected = new IndexOrDocValuesQuery( LongPoint.newRangeQuery("field", instant1, instant2), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 
56e6f5e4c6b04..b3539d9994334 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.time.Instant; import java.util.Collection; import java.util.Collections; @@ -455,7 +456,7 @@ public void testReuseExistingMappings() throws IOException, Exception { .field("my_field3", 44) .field("my_field4", 45) .field("my_field5", 46) - .field("my_field6", 47) + .field("my_field6", Instant.now().toEpochMilli()) .field("my_field7", true) .endObject()); Mapper myField1Mapper = null; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index ff44dec81d962..0b066fbd7162d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -60,8 +60,6 @@ public void testMatchTypeOnly() throws Exception { assertThat(mapperService.fullName("l"), notNullValue()); assertNotSame(IndexOptions.NONE, mapperService.fullName("l").indexOptions()); - - } public void testSimple() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java index fcb78f66add5e..65dcd396ed740 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java @@ -458,7 +458,7 @@ public void testIllegalFormatField() throws Exception { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parser.parse("type", new CompressedXContent(mapping))); - assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage()); + assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index 34e7081d51d5d..699f85f1b12b1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -104,11 +104,12 @@ public void testDateRangeQuery() throws Exception { DateMathParser parser = type.dateMathParser; Query query = new QueryStringQueryBuilder(DATE_RANGE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery(createShardContext()); Query range = LongRange.newIntersectsQuery(DATE_RANGE_FIELD_NAME, - new long[]{ parser.parse("2010-01-01", () -> 0)}, new long[]{ parser.parse("2018-01-01", () -> 0)}); + new long[]{ parser.parse("2010-01-01", () -> 0).toEpochMilli()}, + new long[]{ parser.parse("2018-01-01", () -> 0).toEpochMilli()}); Query dv = RangeFieldMapper.RangeType.DATE.dvRangeQuery(DATE_RANGE_FIELD_NAME, BinaryDocValuesRangeQuery.QueryType.INTERSECTS, - parser.parse("2010-01-01", () -> 0), - parser.parse("2018-01-01", () -> 0), true, true); + parser.parse("2010-01-01", () -> 0).toEpochMilli(), + parser.parse("2018-01-01", () -> 0).toEpochMilli(), true, true); assertEquals(new IndexOrDocValuesQuery(range, dv), query); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index f04a193ef96b2..6ca98fb4db6d2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -48,6 +48,8 @@ import java.net.InetAddress; import java.util.Locale; +import static org.hamcrest.Matchers.containsString; + public class RangeFieldTypeTests extends FieldTypeTestCase { RangeType type; protected static String FIELDNAME = "field"; @@ -111,17 +113,18 @@ public void testDateRangeQueryUsingMappingFormat() { fieldType.setHasDocValues(false); ShapeRelation relation = randomFrom(ShapeRelation.values()); - // dates will break the default format + // dates will break the default format, as month and day of month are swapped in this format final String from = "2016-15-06T15:29:50+08:00"; final String to = "2016-16-06T15:29:50+08:00"; ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class, () -> fieldType.rangeQuery(from, to, true, true, relation, null, null, context)); - assertEquals("failed to parse date field [2016-15-06T15:29:50+08:00] with format [strict_date_optional_time||epoch_millis]", - ex.getMessage()); + assertThat(ex.getMessage(), + containsString("failed to parse date field [2016-15-06T15:29:50+08:00] with format [strict_date_optional_time||epoch_millis]") + ); // setting mapping format which is compatible with those dates - final DateFormatter formatter = DateFormatter.forPattern("yyyy-dd-MM'T'HH:mm:ssZZ"); + final DateFormatter formatter = DateFormatter.forPattern("yyyy-dd-MM'T'HH:mm:ssZZZZZ"); assertEquals(1465975790000L, formatter.parseMillis(from)); assertEquals(1466062190000L, formatter.parseMillis(to)); diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 0eb6de7da252f..6f72277007dd5 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -64,9 +64,10 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.hamcrest.Matchers; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.DateTimeException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -177,7 +178,7 @@ protected QueryStringQueryBuilder doCreateTestQueryBuilder() { queryStringQueryBuilder.minimumShouldMatch(randomMinimumShouldMatch()); } if (randomBoolean()) { - queryStringQueryBuilder.timeZone(randomDateTimeZone().getID()); + queryStringQueryBuilder.timeZone(randomZone().getId()); } if (randomBoolean()) { queryStringQueryBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean()); @@ -211,7 +212,7 @@ public QueryStringQueryBuilder mutateInstance(QueryStringQueryBuilder instance) String quoteFieldSuffix = instance.quoteFieldSuffix(); Float tieBreaker = instance.tieBreaker(); String minimumShouldMatch = instance.minimumShouldMatch(); - String timeZone = instance.timeZone() == null ? null : instance.timeZone().getID(); + String timeZone = instance.timeZone() == null ?
null : instance.timeZone().getId(); boolean autoGenerateSynonymsPhraseQuery = instance.autoGenerateSynonymsPhraseQuery(); boolean fuzzyTranspositions = instance.fuzzyTranspositions(); @@ -319,12 +320,12 @@ public QueryStringQueryBuilder mutateInstance(QueryStringQueryBuilder instance) break; case 20: if (timeZone == null) { - timeZone = randomDateTimeZone().getID(); + timeZone = randomZone().getId(); } else { if (randomBoolean()) { timeZone = null; } else { - timeZone = randomValueOtherThan(timeZone, () -> randomDateTimeZone().getID()); + timeZone = randomValueOtherThan(timeZone, () -> randomZone().getId()); } } break; @@ -848,7 +849,7 @@ public void testTimezone() throws Exception { QueryBuilder queryBuilder = parseQuery(queryAsString); assertThat(queryBuilder, instanceOf(QueryStringQueryBuilder.class)); QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder; - assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris"))); + assertThat(queryStringQueryBuilder.timeZone(), equalTo(ZoneId.of("Europe/Paris"))); String invalidQueryAsString = "{\n" + " \"query_string\":{\n" + @@ -856,7 +857,7 @@ public void testTimezone() throws Exception { " \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" + " }\n" + "}"; - expectThrows(IllegalArgumentException.class, () -> parseQuery(invalidQueryAsString)); + expectThrows(DateTimeException.class, () -> parseQuery(invalidQueryAsString)); } public void testToQueryBooleanQueryMultipleBoosts() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index df312ba84c309..52f2c89d645f9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -44,10 +44,12 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.joda.time.chrono.ISOChronology; import java.io.IOException; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.HashMap; import java.util.Map; @@ -72,18 +74,22 @@ protected RangeQueryBuilder doCreateTestQueryBuilder() { break; case 1: // use mapped date field, using date string representation + Instant now = Instant.now(); + ZonedDateTime start = now.minusMillis(randomIntBetween(0, 1000000)).atZone(ZoneOffset.UTC); + ZonedDateTime end = now.plusMillis(randomIntBetween(0, 1000000)).atZone(ZoneOffset.UTC); query = new RangeQueryBuilder(randomFrom( DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, DATE_ALIAS_FIELD_NAME)); - query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString()); - query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString()); + query.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(start)); + query.to(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(end)); // Create timestamp option only then we have a date mapper, // otherwise we could trigger exception. 
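// [Editor's aside] The hunks around this point all apply the same mechanical
// Joda-to-java.time translation. A hedged, self-contained sketch of the
// equivalences (plain JDK only, no Elasticsearch classes; the class name below
// is illustrative, not from the patch):
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

class JodaToJavaTimeEquivalences {
    public static void main(String[] args) {
        // DateTimeZone.forID("+01:00")            -> ZoneId.of("+01:00")
        ZoneId plusOne = ZoneId.of("+01:00");
        // new DateTime(millis, DateTimeZone.UTC)  -> Instant.ofEpochMilli(millis).atZone(ZoneOffset.UTC)
        ZonedDateTime epoch = Instant.ofEpochMilli(0L).atZone(ZoneOffset.UTC);
        // dateTime.getMillis()                    -> zonedDateTime.toInstant().toEpochMilli()
        long millis = epoch.toInstant().toEpochMilli();
        // dateTime.withZone(zone)                 -> zonedDateTime.withZoneSameInstant(zone)
        ZonedDateTime shifted = epoch.withZoneSameInstant(plusOne);
        System.out.println(millis + " " + shifted); // 0 1970-01-01T01:00+01:00
    }
}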
if (createShardContext().getMapperService().fullName(DATE_FIELD_NAME) != null) { if (randomBoolean()) { - query.timeZone(randomDateTimeZone().getID()); + query.timeZone(randomZone().getId()); } if (randomBoolean()) { - query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ"); + String format = "strict_date_optional_time"; + query.format(format); } } break; @@ -444,7 +450,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC()); query.from(queryFromValue); query.to(queryToValue); - query.timeZone(randomDateTimeZone().getID()); + query.timeZone(randomZone().getId()); query.format("yyyy-MM-dd"); QueryShardContext queryShardContext = createShardContext(); QueryBuilder rewritten = query.rewrite(queryShardContext); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 6d1db852a85a4..7e13b38fd3d25 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.cache.request.RequestCacheStats; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -34,8 +35,8 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.joda.time.DateTimeZone; +import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; @@ -68,7 +69,7 @@ public void testCacheAggs() throws Exception { // which used to not work well with the query cache because of the handles stream output // see #9500 final SearchResponse r1 = client.prepareSearch("index").setSize(0).setSearchType(SearchType.QUERY_THEN_FETCH) - .addAggregation(dateHistogram("histo").field("f").timeZone(DateTimeZone.forID("+01:00")).minDocCount(0) + .addAggregation(dateHistogram("histo").field("f").timeZone(ZoneId.of("+01:00")).minDocCount(0) .dateHistogramInterval(DateHistogramInterval.MONTH)) .get(); assertSearchResponse(r1); @@ -80,7 +81,7 @@ public void testCacheAggs() throws Exception { for (int i = 0; i < 10; ++i) { final SearchResponse r2 = client.prepareSearch("index").setSize(0) .setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(dateHistogram("histo").field("f") - .timeZone(DateTimeZone.forID("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH)) + .timeZone(ZoneId.of("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH)) .get(); assertSearchResponse(r2); Histogram h1 = r1.getAggregations().get("histo"); @@ -246,15 +247,16 @@ public void testQueryRewriteDatesWithNow() throws Exception { assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date") .setSettings(settings).get()); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); - indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now), - client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)), - client.prepareIndex("index-1", 
"type", "3").setSource("d", now.minusDays(2)), - client.prepareIndex("index-2", "type", "4").setSource("d", now.minusDays(3)), - client.prepareIndex("index-2", "type", "5").setSource("d", now.minusDays(4)), - client.prepareIndex("index-2", "type", "6").setSource("d", now.minusDays(5)), - client.prepareIndex("index-3", "type", "7").setSource("d", now.minusDays(6)), - client.prepareIndex("index-3", "type", "8").setSource("d", now.minusDays(7)), - client.prepareIndex("index-3", "type", "9").setSource("d", now.minusDays(8))); + DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time"); + indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now)), + client.prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1))), + client.prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2))), + client.prepareIndex("index-2", "type", "4").setSource("d", formatter.format(now.minusDays(3))), + client.prepareIndex("index-2", "type", "5").setSource("d", formatter.format(now.minusDays(4))), + client.prepareIndex("index-2", "type", "6").setSource("d", formatter.format(now.minusDays(5))), + client.prepareIndex("index-3", "type", "7").setSource("d", formatter.format(now.minusDays(6))), + client.prepareIndex("index-3", "type", "8").setSource("d", formatter.format(now.minusDays(7))), + client.prepareIndex("index-3", "type", "9").setSource("d", formatter.format(now.minusDays(8)))); ensureSearchable("index-1", "index-2", "index-3"); assertCacheState(client, "index-1", 0, 0); assertCacheState(client, "index-2", 0, 0); diff --git a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java index 81f5c7982d4d8..4486d4ff83ffb 100644 --- a/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java +++ b/server/src/test/java/org/elasticsearch/search/DocValueFormatTests.java @@ -29,8 +29,8 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.List; @@ -60,15 +60,15 @@ public void testSerialization() throws Exception { assertEquals(DocValueFormat.Decimal.class, vf.getClass()); assertEquals("###.##", ((DocValueFormat.Decimal) vf).pattern); - DocValueFormat.DateTime dateFormat = - new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1)); + DateFormatter formatter = DateFormatter.forPattern("epoch_second"); + DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(formatter, ZoneOffset.ofHours(1)); out = new BytesStreamOutput(); out.writeNamedWriteable(dateFormat); in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry); vf = in.readNamedWriteable(DocValueFormat.class); assertEquals(DocValueFormat.DateTime.class, vf.getClass()); assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.pattern()); - assertEquals(DateTimeZone.forOffsetHours(1), ((DocValueFormat.DateTime) vf).timeZone); + assertEquals(ZoneOffset.ofHours(1), ((DocValueFormat.DateTime) vf).timeZone); out = new BytesStreamOutput(); out.writeNamedWriteable(DocValueFormat.GEOHASH); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java index 3a10edf183376..a54f30ffac0d1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java @@ -36,7 +36,7 @@ protected AutoDateHistogramAggregationBuilder createTestAggregatorBuilder() { builder.missing(randomIntBetween(0, 10)); } if (randomBoolean()) { - builder.timeZone(randomDateTimeZone()); + builder.timeZone(randomZone()); } return builder; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index f65f2bde9662a..c59be546acd1a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -22,12 +22,12 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.time.DateMathParser; -import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.query.MatchNoneQueryBuilder; @@ -46,13 +46,14 @@ import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; import org.junit.After; import java.io.IOException; +import java.time.Instant; import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -83,21 +84,21 @@ @ESIntegTestCase.SuiteScopeTestCase public class DateHistogramIT extends ESIntegTestCase { - static Map<DateTime, Map<String, Object>> expectedMultiSortBuckets; + static Map<ZonedDateTime, Map<String, Object>> expectedMultiSortBuckets; - private DateTime date(int month, int day) { - return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC); + private ZonedDateTime date(int month, int day) { + return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC); } - private DateTime date(String date) { - return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)); + private ZonedDateTime date(String date) { + return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)); } - private static String format(DateTime date, String pattern) { - return DateTimeFormat.forPattern(pattern).print(date); + private static String format(ZonedDateTime date, String pattern) { + return DateFormatter.forPattern(pattern).format(date); } - private IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception { + private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { return client().prepareIndex(idx, "type").setSource(jsonBuilder() .startObject()
.timeField("date", date) @@ -142,7 +143,7 @@ public void setupSuiteScopeCluster() throws Exception { ensureSearchable(); } - private void addExpectedBucket(DateTime key, long docCount, double avg, double sum) { + private void addExpectedBucket(ZonedDateTime key, long docCount, double avg, double sum) { Map bucketProps = new HashMap<>(); bucketProps.put("_count", docCount); bucketProps.put("avg_l", avg); @@ -196,13 +197,12 @@ public void afterEachTest() throws IOException { internalCluster().wipeIndices("idx2"); } - private static String getBucketKeyAsString(DateTime key) { - return getBucketKeyAsString(key, DateTimeZone.UTC); + private static String getBucketKeyAsString(ZonedDateTime key) { + return getBucketKeyAsString(key, ZoneOffset.UTC); } - private static String getBucketKeyAsString(DateTime key, DateTimeZone tz) { - ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(tz); - return DateFormatter.forPattern(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern()).withZone(zoneId).formatJoda(key); + private static String getBucketKeyAsString(ZonedDateTime key, ZoneId tz) { + return DateFormatter.forPattern("strict_date_optional_time").withZone(tz).format(key); } public void testSingleValuedField() throws Exception { @@ -218,35 +218,34 @@ public void testSingleValuedField() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); } public void testSingleValuedFieldWithTimeZone() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date") - .dateHistogramInterval(DateHistogramInterval.DAY) - .minDocCount(1) - .timeZone(DateTimeZone.forID("+01:00"))).get(); - DateTimeZone tz = DateTimeZone.forID("+01:00"); + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1) + .timeZone(ZoneId.of("+01:00"))).execute() + .actionGet(); + ZoneId tz = ZoneId.of("+01:00"); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); @@ -255,46 +254,46 @@ public void testSingleValuedFieldWithTimeZone() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(6)); - DateTime key = new 
DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 23, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 14, 23, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 23, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(3); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 14, 23, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(4); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(5); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); } @@ -304,7 +303,7 @@ public void testSingleValued_timeZone_epoch() throws Exception { if (randomBoolean()) { format = format + "||date_optional_time"; } - DateTimeZone tz = DateTimeZone.forID("+01:00"); + ZoneId tz = ZoneId.of("+01:00"); SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo").field("date") .dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1) @@ -318,21 +317,25 @@ public void testSingleValued_timeZone_epoch() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(6)); - List expectedKeys = new ArrayList<>(); - expectedKeys.add(new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC)); - expectedKeys.add(new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC)); - expectedKeys.add(new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC)); - expectedKeys.add(new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC)); - expectedKeys.add(new 
DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC)); - expectedKeys.add(new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC)); - + List expectedKeys = new ArrayList<>(); + expectedKeys.add(ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC)); + expectedKeys.add(ZonedDateTime.of(2012, 2, 1, 23, 0, 0, 0, ZoneOffset.UTC)); + expectedKeys.add(ZonedDateTime.of(2012, 2, 14, 23, 0, 0, 0, ZoneOffset.UTC)); + expectedKeys.add(ZonedDateTime.of(2012, 3, 1, 23, 0, 0, 0, ZoneOffset.UTC)); + expectedKeys.add(ZonedDateTime.of(2012, 3, 14, 23, 0, 0, 0, ZoneOffset.UTC)); + expectedKeys.add(ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC)); - Iterator keyIterator = expectedKeys.iterator(); + Iterator keyIterator = expectedKeys.iterator(); for (Histogram.Bucket bucket : buckets) { assertThat(bucket, notNullValue()); - DateTime expectedKey = keyIterator.next(); - assertThat(bucket.getKeyAsString(), equalTo(Long.toString(expectedKey.getMillis() / millisDivider))); - assertThat(((DateTime) bucket.getKey()), equalTo(expectedKey)); + ZonedDateTime expectedKey = keyIterator.next(); + String bucketKey = bucket.getKeyAsString(); + String expectedBucketName = Long.toString(expectedKey.toInstant().toEpochMilli() / millisDivider); + if (JavaVersion.current().getVersion().get(0) == 8 && bucket.getKeyAsString().endsWith(".0")) { + expectedBucketName = expectedBucketName + ".0"; + } + assertThat(bucketKey, equalTo(expectedBucketName)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(expectedKey)); assertThat(bucket.getDocCount(), equalTo(1L)); } } @@ -355,7 +358,7 @@ public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { int i = 0; for (Histogram.Bucket bucket : buckets) { - assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC))); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i++; } } @@ -377,7 +380,7 @@ public void testSingleValuedFieldOrderedByKeyDesc() throws Exception { int i = 2; for (Histogram.Bucket bucket : histo.getBuckets()) { - assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC))); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i--; } } @@ -399,7 +402,7 @@ public void testSingleValuedFieldOrderedByCountAsc() throws Exception { int i = 0; for (Histogram.Bucket bucket : histo.getBuckets()) { - assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC))); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i++; } } @@ -421,7 +424,7 @@ public void testSingleValuedFieldOrderedByCountDesc() throws Exception { int i = 2; for (Histogram.Bucket bucket : histo.getBuckets()) { - assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC))); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i--; } } @@ -444,42 +447,42 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)histo).getProperty("_count"); Object[] propertiesCounts = (Object[]) ((InternalAggregation)histo).getProperty("sum.value"); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); 
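// [Editor's aside] The epoch-format assertions above divide the expected key's
// epoch millis by a "millisDivider" (1000 for an epoch-seconds style format,
// 1 for epoch_millis) and tolerate a trailing ".0" that some JDK 8 builds
// append when printing epoch keys. A hedged, plain-JDK sketch of the
// expected-key computation (names illustrative, not from the patch):
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

class EpochBucketKeySketch {
    public static void main(String[] args) {
        ZonedDateTime expectedKey = ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC);
        long millisDivider = 1000L; // assuming an epoch-seconds style format here
        String expectedBucketName = Long.toString(expectedKey.toInstant().toEpochMilli() / millisDivider);
        System.out.println(expectedBucketName); // 1325458800
    }
}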
Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); assertThat(sum.getValue(), equalTo(1.0)); - assertThat((DateTime) propertiesKeys[0], equalTo(key)); + assertThat((ZonedDateTime) propertiesKeys[0], equalTo(key)); assertThat((long) propertiesDocCounts[0], equalTo(1L)); assertThat((double) propertiesCounts[0], equalTo(1.0)); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); assertThat(sum.getValue(), equalTo(5.0)); - assertThat((DateTime) propertiesKeys[1], equalTo(key)); + assertThat((ZonedDateTime) propertiesKeys[1], equalTo(key)); assertThat((long) propertiesDocCounts[1], equalTo(2L)); assertThat((double) propertiesCounts[1], equalTo(5.0)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); assertThat(sum.getValue(), equalTo(15.0)); - assertThat((DateTime) propertiesKeys[2], equalTo(key)); + assertThat((ZonedDateTime) propertiesKeys[2], equalTo(key)); assertThat((long) propertiesDocCounts[2], equalTo(3L)); assertThat((double) propertiesCounts[2], equalTo(15.0)); } @@ -502,7 +505,7 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { int i = 0; for (Histogram.Bucket bucket : histo.getBuckets()) { - assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC))); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i++; } } @@ -525,7 +528,7 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception int i = 2; for (Histogram.Bucket bucket : histo.getBuckets()) { - assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC))); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i--; } } @@ -548,7 +551,7 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws int i = 2; for (Histogram.Bucket bucket : histo.getBuckets()) { - assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC))); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i--; } } @@ -625,25 +628,25 @@ public void testSingleValuedFieldWithValueScript() throws Exception { List buckets = 
histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); - DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); - key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); } @@ -669,32 +672,32 @@ public void testMultiValuedField() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(4)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(5L)); - key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(3); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); } @@ -763,32 +766,32 @@ public void testMultiValuedFieldWithValueScript() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(4)); - DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 
2, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); - key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(5L)); - key = new DateTime(2012, 5, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 5, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(3); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); } @@ -817,25 +820,25 @@ public void testScriptSingleValue() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); } @@ -856,32 +859,32 @@ public void testScriptMultiValued() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(4)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), 
equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(5L)); - key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(3); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); } @@ -922,25 +925,25 @@ public void testPartiallyUnmapped() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); } @@ -971,7 +974,7 @@ public void testEmptyAggregation() throws Exception { public void testSingleValueWithTimeZone() throws Exception { prepareCreate("idx2").addMapping("type", "date", "type=date").get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; - DateTime date = date("2014-03-11T00:00:00+00:00"); + ZonedDateTime date = date("2014-03-11T00:00:00+00:00"); for (int i = 0; i < reqs.length; i++) { reqs[i] = client().prepareIndex("idx2", "type", "" + i) .setSource(jsonBuilder().startObject().timeField("date", date).endObject()); @@ -983,9 +986,9 @@ public void 
testSingleValueWithTimeZone() throws Exception { .setQuery(matchAllQuery()) .addAggregation(dateHistogram("date_histo") .field("date") - .timeZone(DateTimeZone.forID("-02:00")) + .timeZone(ZoneId.of("-02:00")) .dateHistogramInterval(DateHistogramInterval.DAY) - .format("yyyy-MM-dd:HH-mm-ssZZ")) + .format("yyyy-MM-dd:HH-mm-ssZZZZZ")) .get(); assertThat(response.getHits().getTotalHits().value, equalTo(5L)); @@ -1010,8 +1013,9 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { // we're testing on days, so the base must be rounded to a day int interval = randomIntBetween(1, 2); // in days long intervalMillis = interval * 24 * 60 * 60 * 1000; - DateTime base = new DateTime(DateTimeZone.UTC).dayOfMonth().roundFloorCopy(); - DateTime baseKey = new DateTime(intervalMillis * (base.getMillis() / intervalMillis), DateTimeZone.UTC); + ZonedDateTime base = ZonedDateTime.now(ZoneOffset.UTC).withDayOfMonth(1); + ZonedDateTime baseKey = Instant.ofEpochMilli(intervalMillis * (base.toInstant().toEpochMilli() / intervalMillis)) + .atZone(ZoneOffset.UTC); prepareCreate("idx2") .setSettings( @@ -1028,7 +1032,7 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { } else { int docCount = randomIntBetween(1, 3); for (int j = 0; j < docCount; j++) { - DateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1)); + ZonedDateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1)); builders.add(indexDoc("idx2", date, j)); } docCounts[i] = docCount; @@ -1037,19 +1041,19 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { indexRandom(true, builders); ensureSearchable("idx2"); - DateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval); + ZonedDateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval); // randomizing the number of buckets on the min bound // (can sometimes fall within the data range, but more frequently will fall before the data range) int addedBucketsLeft = randomIntBetween(0, numOfBuckets); - DateTime boundsMinKey; + ZonedDateTime boundsMinKey; if (frequently()) { boundsMinKey = baseKey.minusDays(addedBucketsLeft * interval); } else { boundsMinKey = baseKey.plusDays(addedBucketsLeft * interval); addedBucketsLeft = 0; } - DateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1)); + ZonedDateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1)); // randomizing the number of buckets on the max bound // (can sometimes fall within the data range, but more frequently will fall after the data range) @@ -1059,8 +1063,8 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { addedBucketsRight = 0; boundsMaxKeyDelta = -boundsMaxKeyDelta; } - DateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta); - DateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1)); + ZonedDateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta); + ZonedDateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1)); // it could be that the random bounds.min we chose ended up greater than // bounds.max - this should @@ -1105,11 +1109,11 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(bucketsCount)); - DateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey; + ZonedDateTime key = baseKey.isBefore(boundsMinKey) ? 
baseKey : boundsMinKey; for (int i = 0; i < bucketsCount; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getKeyAsString(), equalTo(format(key, pattern))); assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i])); key = key.plusDays(interval); @@ -1126,15 +1130,15 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) .get(); - DateMathParser parser = Joda.getStrictStandardDateFormatter().toDateMathParser(); + DateMathParser parser = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis").toDateMathParser(); // we pick a random timezone offset of +12/-12 hours and insert two documents // one at 00:00 in that time zone and one at 12:00 List builders = new ArrayList<>(); int timeZoneHourOffset = randomIntBetween(-12, 12); - DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset); - DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", System::currentTimeMillis, false, timezone), DateTimeZone.UTC); - DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", System::currentTimeMillis, false, timezone), DateTimeZone.UTC); + ZoneId timezone = ZoneOffset.ofHours(timeZoneHourOffset); + ZonedDateTime timeZoneStartToday = parser.parse("now/d", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC); + ZonedDateTime timeZoneNoonToday = parser.parse("now/d+12h", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC); builders.add(indexDoc(index, timeZoneStartToday, 1)); builders.add(indexDoc(index, timeZoneNoonToday, 2)); indexRandom(true, builders); @@ -1145,7 +1149,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { response = client() .prepareSearch(index) .setQuery(QueryBuilders.rangeQuery("date") - .from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getID())) + .from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId())) .addAggregation( dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.hours(1)) .timeZone(timezone).minDocCount(0).extendedBounds(new ExtendedBounds("now/d", "now/d+23h")) @@ -1164,8 +1168,8 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { for (int i = 0; i < buckets.size(); i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); - assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(), - equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC))); + ZonedDateTime zonedDateTime = timeZoneStartToday.plus(i * 60 * 60 * 1000, ChronoUnit.MILLIS); + assertThat("InternalBucket " + i + " had wrong key", (ZonedDateTime) bucket.getKey(), equalTo(zonedDateTime)); if (i == 0 || i == 12) { assertThat(bucket.getDocCount(), equalTo(1L)); } else { @@ -1186,10 +1190,11 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { .get(); List builders = new ArrayList<>(); - builders.add(indexDoc(index, DateTime.parse("2016-01-03T08:00:00.000Z"), 1)); - builders.add(indexDoc(index, DateTime.parse("2016-01-03T08:00:00.000Z"), 2)); - builders.add(indexDoc(index, DateTime.parse("2016-01-06T08:00:00.000Z"), 3)); - 
builders.add(indexDoc(index, DateTime.parse("2016-01-06T08:00:00.000Z"), 4)); + DateFormatter formatter = DateFormatter.forPattern("date_optional_time"); + builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-03T08:00:00.000Z")), 1)); + builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-03T08:00:00.000Z")), 2)); + builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-06T08:00:00.000Z")), 3)); + builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-06T08:00:00.000Z")), 4)); indexRandom(true, builders); ensureSearchable(index); @@ -1233,7 +1238,7 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception { String mappingJson = Strings.toString(jsonBuilder().startObject() .startObject("type").startObject("properties") - .startObject("date").field("type", "date").field("format", "dateOptionalTime||dd-MM-yyyy") + .startObject("date").field("type", "date").field("format", "strict_date_optional_time||dd-MM-yyyy") .endObject().endObject().endObject().endObject()); prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; @@ -1256,23 +1261,23 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(1)); - DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2014, 3, 10, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(5L)); } public void testIssue6965() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("+01:00")) - .dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0)) + .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("+01:00")) + .dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0)) .get(); assertSearchResponse(response); - DateTimeZone tz = DateTimeZone.forID("+01:00"); + ZoneId tz = ZoneId.of("+01:00"); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -1280,25 +1285,25 @@ public void testIssue6965() { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); - DateTime key = new DateTime(2011, 12, 31, 23, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2011, 12, 31, 23, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); - key = new DateTime(2012, 1, 31, 23, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 1, 31, 23, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + 
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); - key = new DateTime(2012, 2, 29, 23, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 29, 23, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); } @@ -1309,7 +1314,7 @@ public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionExc ensureSearchable("test9491"); SearchResponse response = client().prepareSearch("test9491") .addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.YEAR) - .timeZone(DateTimeZone.forID("Asia/Jerusalem"))) + .timeZone(ZoneId.of("Asia/Jerusalem")).format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")) .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); @@ -1327,8 +1332,8 @@ public void testIssue8209() throws InterruptedException, ExecutionException { ensureSearchable("test8209"); SearchResponse response = client().prepareSearch("test8209") .addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.MONTH) - .timeZone(DateTimeZone.forID("CET")) - .minDocCount(0)) + .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX") + .timeZone(ZoneId.of("CET")).minDocCount(0)) .get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); @@ -1371,7 +1376,7 @@ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionExce SearchResponse response = client().prepareSearch(indexDateUnmapped) .addAggregation( - dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("YYYY-MM") + dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("yyyy-MM") .minDocCount(0).extendedBounds(new ExtendedBounds("2018-01", "2018-01"))) .get(); assertSearchResponse(response); @@ -1393,15 +1398,19 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, indexRandom(true, client().prepareIndex(index, "type").setSource("d", "1477954800000")); ensureSearchable(index); SearchResponse response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d") - .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin"))).get(); + .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))).get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), equalTo(1)); - assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000")); + if (JavaVersion.current().getVersion().get(0) == 8 && histo.getBuckets().get(0).getKeyAsString().endsWith(".0")) { + assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000.0")); + } else { + assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000")); + } assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L)); response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d") - .dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin")).format("yyyy-MM-dd")) + 
.dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin")).format("yyyy-MM-dd")) .get(); assertSearchResponse(response); histo = response.getAggregations().get("histo"); @@ -1422,7 +1431,7 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, public void testDSTEndTransition() throws Exception { SearchResponse response = client().prepareSearch("idx") .setQuery(new MatchNoneQueryBuilder()) - .addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("Europe/Oslo")) + .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo")) .dateHistogramInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds( new ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00"))) .get(); @@ -1430,9 +1439,12 @@ public void testDSTEndTransition() throws Exception { Histogram histo = response.getAggregations().get("histo"); List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(4)); - assertThat(((DateTime) buckets.get(1).getKey()).getMillis() - ((DateTime) buckets.get(0).getKey()).getMillis(), equalTo(3600000L)); - assertThat(((DateTime) buckets.get(2).getKey()).getMillis() - ((DateTime) buckets.get(1).getKey()).getMillis(), equalTo(3600000L)); - assertThat(((DateTime) buckets.get(3).getKey()).getMillis() - ((DateTime) buckets.get(2).getKey()).getMillis(), equalTo(3600000L)); + assertThat(((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() - + ((ZonedDateTime) buckets.get(0).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); + assertThat(((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() - + ((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); + assertThat(((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() - + ((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli(), equalTo(3600000L)); } /** @@ -1443,8 +1455,10 @@ public void testDontCacheScripts() throws Exception { assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=date") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get()); - indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date(1, 1)), - client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date(2, 1))); + String date = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(1, 1)); + String date2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(2, 1)); + indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date), + client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date2)); // Make sure we are starting with a clear cache assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() @@ -1514,7 +1528,7 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound } private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) { - DateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(DateTime[]::new); + ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new); SearchResponse response = client() .prepareSearch("sort_idx") .setTypes("type") @@ -1544,7 +1558,7 @@ private void assertMultiSortResponse(int[] expectedDays, BucketOrder... 
order) { } } - private DateTime key(Histogram.Bucket bucket) { - return (DateTime) bucket.getKey(); + private ZonedDateTime key(Histogram.Bucket bucket) { + return (ZonedDateTime) bucket.getKey(); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 44a9f8c2cb126..080c4faffd696 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -20,18 +20,19 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.test.ESIntegTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.List; -import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -50,9 +51,10 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss"; + private static final DateFormatter FORMATTER = DateFormatter.forPattern(DATE_FORMAT); - private DateTime date(String date) { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date); + private ZonedDateTime date(String date) { + return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)); } @Before @@ -65,8 +67,9 @@ public void afterEachTest() throws IOException { internalCluster().wipeIndices("idx2"); } - private void prepareIndex(DateTime date, int numHours, int stepSizeHours, int idxIdStart) - throws IOException, InterruptedException, ExecutionException { + private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, int idxIdStart) + throws IOException, InterruptedException { + IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours]; for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) { reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i) @@ -94,8 +97,8 @@ public void testSingleValueWithPositiveOffset() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(2)); - checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 2, 0, DateTimeZone.UTC), 2L); - checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 2, 0, DateTimeZone.UTC), 3L); + checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 2, 0, 0, 0, ZoneOffset.UTC), 2L); + checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 2, 0, 0, 0, ZoneOffset.UTC), 3L); } public void testSingleValueWithNegativeOffset() throws Exception { @@ -116,8 +119,8 @@ public void testSingleValueWithNegativeOffset() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(2)); - checkBucketFor(buckets.get(0), new DateTime(2014, 3, 9, 22, 0, DateTimeZone.UTC), 2L); - 
checkBucketFor(buckets.get(1), new DateTime(2014, 3, 10, 22, 0, DateTimeZone.UTC), 3L); + checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 9, 22, 0, 0, 0, ZoneOffset.UTC), 2L); + checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 10, 22, 0, 0, 0, ZoneOffset.UTC), 3L); } /** @@ -143,11 +146,11 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(5)); - checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 6, 0, DateTimeZone.UTC), 6L); - checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 6, 0, DateTimeZone.UTC), 6L); - checkBucketFor(buckets.get(2), new DateTime(2014, 3, 12, 6, 0, DateTimeZone.UTC), 0L); - checkBucketFor(buckets.get(3), new DateTime(2014, 3, 13, 6, 0, DateTimeZone.UTC), 6L); - checkBucketFor(buckets.get(4), new DateTime(2014, 3, 14, 6, 0, DateTimeZone.UTC), 6L); + checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 6, 0, 0, 0, ZoneOffset.UTC), 6L); + checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 6, 0, 0, 0, ZoneOffset.UTC), 6L); + checkBucketFor(buckets.get(2), ZonedDateTime.of(2014, 3, 12, 6, 0, 0, 0, ZoneOffset.UTC), 0L); + checkBucketFor(buckets.get(3), ZonedDateTime.of(2014, 3, 13, 6, 0, 0, 0, ZoneOffset.UTC), 6L); + checkBucketFor(buckets.get(4), ZonedDateTime.of(2014, 3, 14, 6, 0, 0, 0, ZoneOffset.UTC), 6L); } /** @@ -155,10 +158,10 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception { * @param key the expected key * @param expectedSize the expected size of the bucket */ - private static void checkBucketFor(Histogram.Bucket bucket, DateTime key, long expectedSize) { + private static void checkBucketFor(Histogram.Bucket bucket, ZonedDateTime key, long expectedSize) { assertThat(bucket, notNullValue()); - assertThat(bucket.getKeyAsString(), equalTo(key.toString(DATE_FORMAT))); - assertThat(((DateTime) bucket.getKey()), equalTo(key)); + assertThat(bucket.getKeyAsString(), equalTo(FORMATTER.format(key))); + assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(expectedSize)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index 91834de935b4f..f50c0bfd072b1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -18,9 +18,11 @@ */ package org.elasticsearch.search.aggregations.bucket; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; @@ -33,9 +35,10 @@ import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -70,12 +73,12 @@ private static IndexRequestBuilder indexDoc(int month, int day, int value) throw .endObject()); } - private static 
DateTime date(int month, int day) { - return date(month, day, DateTimeZone.UTC); + private static ZonedDateTime date(int month, int day) { + return date(month, day, ZoneOffset.UTC); } - private static DateTime date(int month, int day, DateTimeZone timezone) { - return new DateTime(2012, month, day, 0, 0, timezone); + private static ZonedDateTime date(int month, int day, ZoneId timezone) { + return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, timezone); } private static int numDocs; @@ -128,7 +131,7 @@ public void testDateMath() throws Exception { .prepareSearch("idx") .addAggregation( rangeBuilder.addUnboundedTo("a long time ago", "now-50y").addRange("recently", "now-50y", "now-1y") - .addUnboundedFrom("last year", "now-1y").timeZone(DateTimeZone.forID("EST"))).get(); + .addUnboundedFrom("last year", "now-1y").timeZone(ZoneId.of("Etc/GMT+5"))).get(); assertSearchResponse(response); @@ -176,8 +179,8 @@ public void testSingleValueField() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -185,8 +188,8 @@ public void testSingleValueField() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -194,8 +197,8 @@ public void testSingleValueField() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); @@ -222,8 +225,8 @@ public void testSingleValueFieldWithStringDates() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), 
equalTo(2L)); @@ -231,8 +234,8 @@ public void testSingleValueFieldWithStringDates() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -240,8 +243,8 @@ public void testSingleValueFieldWithStringDates() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); @@ -269,8 +272,8 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -278,8 +281,8 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15-2012-03-15")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15")); assertThat(bucket.getToAsString(), equalTo("2012-03-15")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -287,19 +290,17 @@ public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Excepti bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); } public void testSingleValueFieldWithDateMath() throws Exception { - DateTimeZone timezone = randomDateTimeZone(); - int timeZoneOffset = timezone.getOffset(date(2, 15)); - // if 
time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format - String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).toString("ZZ"); - String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3,15, timezone).toString("ZZ"); + ZoneId timezone = randomZone(); + int timeZoneOffset = timezone.getRules().getOffset(date(2, 15).toInstant()).getTotalSeconds(); + String suffix = timezone.equals(ZoneOffset.UTC) ? "Z" : timezone.getId(); long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L; SearchResponse response = client().prepareSearch("idx") @@ -321,29 +322,29 @@ public void testSingleValueFieldWithDateMath() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + feb15Suffix)); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC))); + assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + suffix)); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC))); assertThat(bucket.getFromAsString(), nullValue()); - assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix)); + assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + suffix)); assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix + - "-2012-03-15T00:00:00.000" + mar15Suffix)); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC))); - assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix)); - assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix)); + assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + suffix + + "-2012-03-15T00:00:00.000" + suffix)); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC))); + assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + suffix)); + assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + suffix)); assertThat(bucket.getDocCount(), equalTo(2L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix + "-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC))); - assertThat(((DateTime) bucket.getTo()), nullValue()); - assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix)); + assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + suffix + "-*")); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); + assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + suffix)); 
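[reviewer note] The date-math hunk above is one of the few places where the Joda-to-java.time conversion changes behavior rather than just spelling: Joda's timezone.getOffset(instant) returned milliseconds, while java.time goes through the zone rules and reports seconds, and toDateTime(DateTimeZone.UTC) becomes withZoneSameInstant(ZoneOffset.UTC). A minimal sketch of the two idioms, assuming only the JDK (class and variable names here are illustrative, not from the patch):

    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    public class ZoneOffsetDemo {
        public static void main(String[] args) {
            ZoneId zone = ZoneId.of("Asia/Jerusalem");
            ZonedDateTime date = ZonedDateTime.of(2012, 2, 15, 0, 0, 0, 0, zone);

            // Joda's timezone.getOffset(instant) becomes a lookup through the
            // zone rules, and the result is in seconds, not milliseconds.
            int offsetSeconds = zone.getRules().getOffset(date.toInstant()).getTotalSeconds();

            // Joda's toDateTime(DateTimeZone.UTC) becomes withZoneSameInstant:
            // the wall-clock fields change, the underlying instant does not.
            ZonedDateTime utc = date.withZoneSameInstant(ZoneOffset.UTC);

            System.out.println(offsetSeconds); // 7200 (+02:00 in February)
            System.out.println(utc);           // 2012-02-14T22:00Z
        }
    }

The sign check on timeZoneOffset in the test still works after the switch because only the unit changed (seconds instead of milliseconds), not the sign.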
assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 2L - expectedFirstBucketCount)); } @@ -369,8 +370,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("r1")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -378,8 +379,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("r2")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -387,8 +388,8 @@ public void testSingleValueFieldWithCustomKey() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("r3")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); @@ -429,8 +430,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("r1")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -444,8 +445,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("r2")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -459,8 +460,8 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { bucket = buckets.get(2); assertThat(bucket, 
notNullValue()); assertThat((String) bucket.getKey(), equalTo("r3")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); @@ -502,8 +503,8 @@ public void testMultiValuedField() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -511,8 +512,8 @@ public void testMultiValuedField() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(3L)); @@ -520,8 +521,8 @@ public void testMultiValuedField() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 2L)); @@ -557,8 +558,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(1L)); @@ -566,8 +567,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 
15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -575,8 +576,8 @@ public void testMultiValuedFieldWithValueScript() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 1L)); @@ -616,8 +617,8 @@ public void testScriptSingleValue() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -625,8 +626,8 @@ public void testScriptSingleValue() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -634,8 +635,8 @@ public void testScriptSingleValue() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); @@ -675,8 +676,8 @@ public void testScriptMultiValued() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); 
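[reviewer note] The long run of bucket assertions above and below is mechanical: every (DateTime) cast becomes (ZonedDateTime), Joda's five-argument constructor gains explicit second and nanosecond fields, and getMillis() becomes a two-step trip through Instant. A compact before/after sketch, assuming only the JDK (the class name is hypothetical):

    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    public class EpochMilliDemo {
        public static void main(String[] args) {
            // Joda: new DateTime(2012, 2, 15, 0, 0, DateTimeZone.UTC)
            // java.time adds explicit second and nanosecond fields:
            ZonedDateTime date = ZonedDateTime.of(2012, 2, 15, 0, 0, 0, 0, ZoneOffset.UTC);

            // Joda: date.getMillis()
            long millis = date.toInstant().toEpochMilli();

            System.out.println(millis); // 1329264000000
        }
    }

The extra toInstant() hop is also why several assertions in this patch now span two lines where the Joda versions fit on one.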
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -684,8 +685,8 @@ public void testScriptMultiValued() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(3L)); @@ -693,8 +694,8 @@ public void testScriptMultiValued() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 2L)); @@ -723,8 +724,8 @@ public void testUnmapped() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -732,8 +733,8 @@ public void testUnmapped() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -741,8 +742,8 @@ public void testUnmapped() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -769,8 +770,8 @@ public void testUnmappedWithStringDates() throws Exception { Range.Bucket bucket = 
buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -778,8 +779,8 @@ public void testUnmappedWithStringDates() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -787,8 +788,8 @@ public void testUnmappedWithStringDates() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -815,8 +816,8 @@ public void testPartiallyUnmapped() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), nullValue()); - assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), nullValue()); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15))); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -824,8 +825,8 @@ public void testPartiallyUnmapped() throws Exception { bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15))); - assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15))); assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -833,8 +834,8 @@ public void testPartiallyUnmapped() throws Exception { bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*")); - assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15))); - 
assertThat(((DateTime) bucket.getTo()), nullValue()); + assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15))); + assertThat(((ZonedDateTime) bucket.getTo()), nullValue()); assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z")); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(numDocs - 4L)); @@ -859,8 +860,8 @@ public void testEmptyAggregation() throws Exception { assertThat(dateRange.getName(), equalTo("date_range")); assertThat(buckets.size(), is(1)); assertThat((String) buckets.get(0).getKey(), equalTo("0-1")); - assertThat(((DateTime) buckets.get(0).getFrom()).getMillis(), equalTo(0L)); - assertThat(((DateTime) buckets.get(0).getTo()).getMillis(), equalTo(1L)); + assertThat(((ZonedDateTime) buckets.get(0).getFrom()).toInstant().toEpochMilli(), equalTo(0L)); + assertThat(((ZonedDateTime) buckets.get(0).getTo()).toInstant().toEpochMilli(), equalTo(1L)); assertThat(buckets.get(0).getDocCount(), equalTo(0L)); assertThat(buckets.get(0).getAggregations().asList().isEmpty(), is(true)); } @@ -903,7 +904,8 @@ public void testDontCacheScripts() throws Exception { params.put("fieldname", "date"); SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date") .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params)) - .addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC))) + .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))) .get(); assertSearchResponse(r); @@ -915,7 +917,8 @@ public void testDontCacheScripts() throws Exception { // To make sure that the cache is working test that a request not using // a script is cached r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date") - .addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC))) + .addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))) .get(); assertSearchResponse(r); @@ -969,10 +972,9 @@ public void testRangeWithFormatStringValue() throws Exception { assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); // providing numeric input without format should throw an exception - Exception e = expectThrows(Exception.class, () -> client().prepareSearch(indexName).setSize(0) + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client().prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000)).get()); - Throwable cause = e.getCause(); - assertThat(cause.getMessage(), + assertThat(e.getDetailedMessage(), containsString("failed to parse date field [1000000] with format [strict_hour_minute_second]")); } @@ -984,9 +986,9 @@ public void testRangeWithFormatNumericValue() throws Exception { String indexName = "dateformat_numeric_test_idx"; assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=epoch_second")); indexRandom(true, - client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1000).endObject()), + client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()), client().prepareIndex(indexName, "type", 
"2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()), - client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3000).endObject())); + client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3008).endObject())); // using no format should work when to/from is compatible with format in // mapping @@ -994,39 +996,39 @@ public void testRangeWithFormatNumericValue() throws Exception { .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); List buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); - assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); - assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); + if (JavaVersion.current().getVersion().get(0) == 8) { + assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L); + assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L); + } else { + assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); + assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); + } // using no format should also work when and to/from are string values searchResponse = client().prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); - assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); - assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); + if (JavaVersion.current().getVersion().get(0) == 8) { + assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L); + assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L); + } else { + assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); + assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); + } // also e-notation should work, fractional parts should be truncated searchResponse = client().prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); - assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); - assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); - - // also e-notation and floats provided as string also be truncated (see: #14641) - searchResponse = client().prepareSearch(indexName).setSize(0) - .addAggregation(dateRange("date_range").field("date").addRange("1.0e3", "3.0e3").addRange("3.0e3", "4.0e3")).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); - assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); - assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); - - searchResponse = client().prepareSearch(indexName).setSize(0) - .addAggregation(dateRange("date_range").field("date").addRange("1000.123", "3000.8").addRange("3000.8", "4000.3")).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); - 
buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2); - assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); - assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); + if (JavaVersion.current().getVersion().get(0) == 8) { + assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L); + assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L); + } else { + assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); + assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); + } // using different format should work when to/from is compatible with // format in aggregation @@ -1061,8 +1063,8 @@ private static List checkBuckets(Range dateRange, String expectedA private static void assertBucket(Bucket bucket, long bucketSize, String expectedKey, long expectedFrom, long expectedTo) { assertThat(bucket.getDocCount(), equalTo(bucketSize)); assertThat((String) bucket.getKey(), equalTo(expectedKey)); - assertThat(((DateTime) bucket.getFrom()).getMillis(), equalTo(expectedFrom)); - assertThat(((DateTime) bucket.getTo()).getMillis(), equalTo(expectedTo)); + assertThat(((ZonedDateTime) bucket.getFrom()).toInstant().toEpochMilli(), equalTo(expectedFrom)); + assertThat(((ZonedDateTime) bucket.getTo()).toInstant().toEpochMilli(), equalTo(expectedTo)); assertThat(bucket.getAggregations().asList().isEmpty(), is(true)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index 1c198fd3ca5d6..34164bc28967c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -65,7 +65,7 @@ protected DateRangeAggregationBuilder createTestAggregatorBuilder() { factory.missing(randomIntBetween(0, 10)); } if (randomBoolean()) { - factory.timeZone(randomDateTimeZone()); + factory.timeZone(randomZone()); } return factory; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index e7bf0fe4cf700..b09277aca6c6d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -22,11 +22,10 @@ import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -124,7 +123,7 @@ public void setupSuiteScopeCluster() throws Exception { double doubleTerm = longTerm * Math.PI; ZonedDateTime time = ZonedDateTime.of(2014, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC); - String dateTerm = DateFormatters.forPattern("yyyy-MM-dd").format(time); + String dateTerm = DateFormatter.forPattern("yyyy-MM-dd").format(time); final int frequency = randomBoolean() ? 
1 : randomIntBetween(2, 20); for (int j = 0; j < frequency; ++j) { indexRequests.add(client().prepareIndex("idx", "type").setSource(jsonBuilder() diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java index ac985660399d7..d31f7a89b462e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilderTests.java @@ -42,7 +42,7 @@ private DateHistogramValuesSourceBuilder randomDateHistogramSourceBuilder() { histo.interval(randomNonNegativeLong()); } if (randomBoolean()) { - histo.timeZone(randomDateTimeZone()); + histo.timeZone(randomZone()); } if (randomBoolean()) { histo.missingBucket(true); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index e945eeba519f4..5f219ee6be948 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -39,6 +39,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.IpFieldMapper; @@ -57,12 +58,12 @@ import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.sort.SortOrder; -import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.net.InetAddress; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -1155,8 +1156,7 @@ public void testThatDateHistogramFailsFormatAfter() throws IOException { }, (result) -> {} )); - assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(exc.getCause().getMessage(), containsString("Parse failure")); + assertThat(exc.getMessage(), containsString("failed to parse date field [1474329600000]")); } public void testWithDateHistogramAndTimeZone() throws IOException { @@ -1176,7 +1176,7 @@ public void testWithDateHistogramAndTimeZone() throws IOException { DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") .field("date") .dateHistogramInterval(DateHistogramInterval.days(1)) - .timeZone(DateTimeZone.forOffsetHours(1)); + .timeZone(ZoneOffset.ofHours(1)); return new CompositeAggregationBuilder("name", Collections.singletonList(histo)); }, (result) -> { @@ -1196,7 +1196,7 @@ public void testWithDateHistogramAndTimeZone() throws IOException { DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date") .field("date") .dateHistogramInterval(DateHistogramInterval.days(1)) - .timeZone(DateTimeZone.forOffsetHours(1)); + .timeZone(ZoneOffset.ofHours(1)); return new CompositeAggregationBuilder("name", Collections.singletonList(histo)) 
.aggregateAfter(createAfterKey("date", 1474326000000L)); @@ -1835,6 +1835,6 @@ private static Map> createDocument(Object... fields) { } private static long asLong(String dateTime) { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis(); + return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli(); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java index be9a760150427..35bf575d046be 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/InternalCompositeTests.java @@ -30,10 +30,10 @@ import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; -import org.joda.time.DateTimeZone; import org.junit.After; import java.io.IOException; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -64,7 +64,7 @@ private static DocValueFormat randomDocValueFormat(boolean isLong) { if (isLong) { // we use specific format only for date histogram on a long/date field if (randomBoolean()) { - return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1)); + return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), ZoneOffset.ofHours(1)); } else { return DocValueFormat.RAW; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java index 6b4d1482adb5e..9293b33e22f43 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -41,11 +42,13 @@ import org.elasticsearch.search.aggregations.metrics.InternalStats; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.hamcrest.Matchers; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.junit.Assert; import java.io.IOException; +import java.time.LocalDate; +import java.time.YearMonth; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -59,17 +62,17 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase { private static final String DATE_FIELD = "date"; private static final String INSTANT_FIELD = "instant"; - private static final List DATES_WITH_TIME = Arrays.asList( - new DateTime(2010, 3, 12, 1, 7, 45, DateTimeZone.UTC), - new DateTime(2010, 4, 27, 3, 43, 34, 
DateTimeZone.UTC), - new DateTime(2012, 5, 18, 4, 11, 0, DateTimeZone.UTC), - new DateTime(2013, 5, 29, 5, 11, 31, DateTimeZone.UTC), - new DateTime(2013, 10, 31, 8, 24, 5, DateTimeZone.UTC), - new DateTime(2015, 2, 13, 13, 9, 32, DateTimeZone.UTC), - new DateTime(2015, 6, 24, 13, 47, 43, DateTimeZone.UTC), - new DateTime(2015, 11, 13, 16, 14, 34, DateTimeZone.UTC), - new DateTime(2016, 3, 4, 17, 9, 50, DateTimeZone.UTC), - new DateTime(2017, 12, 12, 22, 55, 46, DateTimeZone.UTC)); + private static final List DATES_WITH_TIME = Arrays.asList( + ZonedDateTime.of(2010, 3, 12, 1, 7, 45, 0, ZoneOffset.UTC), + ZonedDateTime.of(2010, 4, 27, 3, 43, 34, 0, ZoneOffset.UTC), + ZonedDateTime.of(2012, 5, 18, 4, 11, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2013, 5, 29, 5, 11, 31, 0, ZoneOffset.UTC), + ZonedDateTime.of(2013, 10, 31, 8, 24, 5, 0, ZoneOffset.UTC), + ZonedDateTime.of(2015, 2, 13, 13, 9, 32, 0, ZoneOffset.UTC), + ZonedDateTime.of(2015, 6, 24, 13, 47, 43, 0, ZoneOffset.UTC), + ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC), + ZonedDateTime.of(2016, 3, 4, 17, 9, 50, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 12, 12, 22, 55, 46, 0, ZoneOffset.UTC)); private static final Query DEFAULT_QUERY = new MatchAllDocsQuery(); @@ -184,7 +187,7 @@ public void testSubAggregations() throws IOException { } public void testNoDocs() throws IOException { - final List dates = Collections.emptyList(); + final List dates = Collections.emptyList(); final Consumer aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD); testSearchCase(DEFAULT_QUERY, dates, aggregation, @@ -209,8 +212,10 @@ public void testAggregateWrongField() throws IOException { } public void testIntervalYear() throws IOException { - final long start = new DateTime(DateTimeZone.UTC).withDate(2015, 1, 1).getMillis(); - final long end = new DateTime(DateTimeZone.UTC).withDate(2017, 12, 31).getMillis(); + + + final long start = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli(); + final long end = LocalDate.of(2017, 12, 31).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli(); final Query rangeQuery = LongPoint.newRangeQuery(INSTANT_FIELD, start, end); testSearchCase(rangeQuery, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), @@ -228,8 +233,8 @@ public void testIntervalYear() throws IOException { testSearchAndReduceCase(rangeQuery, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> { - final DateTime startDate = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC); - final Map expectedDocCount = new HashMap<>(); + final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + final Map expectedDocCount = new HashMap<>(); expectedDocCount.put(startDate, 3); expectedDocCount.put(startDate.plusYears(1), 1); expectedDocCount.put(startDate.plusYears(2), 1); @@ -243,13 +248,13 @@ public void testIntervalYear() throws IOException { } public void testIntervalMonth() throws IOException { - final List datesForMonthInterval = Arrays.asList( - new DateTime(2017, 1, 1, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 3, 4, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 3, 5, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 3, 6, 0, 0, 0, DateTimeZone.UTC)); + final List datesForMonthInterval = Arrays.asList( + ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 2, 0, 
0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 3, 4, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 3, 5, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 3, 6, 0, 0, 0, 0, ZoneOffset.UTC)); testSearchCase(DEFAULT_QUERY, datesForMonthInterval, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -263,7 +268,7 @@ public void testIntervalMonth() throws IOException { testSearchAndReduceCase(DEFAULT_QUERY, datesForMonthInterval, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> { - final Map expectedDocCount = new HashMap<>(); + final Map expectedDocCount = new HashMap<>(); expectedDocCount.put(datesForMonthInterval.get(0).withDayOfMonth(1), 1); expectedDocCount.put(datesForMonthInterval.get(1).withDayOfMonth(1), 2); expectedDocCount.put(datesForMonthInterval.get(3).withDayOfMonth(1), 3); @@ -287,15 +292,15 @@ public void testWithLargeNumberOfBuckets() { } public void testIntervalDay() throws IOException { - final List datesForDayInterval = Arrays.asList( - new DateTime(2017, 2, 1, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 5, 0, 0, 0, DateTimeZone.UTC)); - final Map expectedDocCount = new HashMap<>(); + final List datesForDayInterval = Arrays.asList( + ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC)); + final Map expectedDocCount = new HashMap<>(); expectedDocCount.put(datesForDayInterval.get(0), 1); expectedDocCount.put(datesForDayInterval.get(1), 2); expectedDocCount.put(datesForDayInterval.get(3), 3); @@ -321,16 +326,16 @@ public void testIntervalDay() throws IOException { } public void testIntervalDayWithTZ() throws IOException { - final List datesForDayInterval = Arrays.asList( - new DateTime(2017, 2, 1, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 5, 0, 0, 0, DateTimeZone.UTC)); + final List datesForDayInterval = Arrays.asList( + ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC)); testSearchCase(DEFAULT_QUERY, datesForDayInterval, - aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> { + aggregation -> 
aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> { final Map expectedDocCount = new HashMap<>(); expectedDocCount.put("2017-01-31T23:00:00.000-01:00", 1); expectedDocCount.put("2017-02-01T23:00:00.000-01:00", 2); @@ -343,7 +348,7 @@ public void testIntervalDayWithTZ() throws IOException { assertTrue(AggregationInspectionHelper.hasValue(histogram)); }); testSearchAndReduceCase(DEFAULT_QUERY, datesForDayInterval, - aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> { + aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> { final Map expectedDocCount = new HashMap<>(); expectedDocCount.put("2017-01-31T00:00:00.000-01:00", 1); expectedDocCount.put("2017-02-01T00:00:00.000-01:00", 2); @@ -358,17 +363,17 @@ public void testIntervalDayWithTZ() throws IOException { } public void testIntervalHour() throws IOException { - final List datesForHourInterval = Arrays.asList( - new DateTime(2017, 2, 1, 9, 2, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 9, 35, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 10, 15, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 13, 6, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 14, 4, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 14, 5, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 15, 59, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 16, 6, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 16, 48, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 16, 59, 0, DateTimeZone.UTC)); + final List datesForHourInterval = Arrays.asList( + ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC)); testSearchCase(DEFAULT_QUERY, datesForHourInterval, aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), histogram -> { @@ -384,13 +389,13 @@ public void testIntervalHour() throws IOException { testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), histogram -> { - final Map expectedDocCount = new HashMap<>(); - expectedDocCount.put(datesForHourInterval.get(0).withMinuteOfHour(0), 2); - expectedDocCount.put(datesForHourInterval.get(2).withMinuteOfHour(0), 1); - expectedDocCount.put(datesForHourInterval.get(3).withMinuteOfHour(0), 1); - expectedDocCount.put(datesForHourInterval.get(4).withMinuteOfHour(0), 2); - expectedDocCount.put(datesForHourInterval.get(6).withMinuteOfHour(0), 1); - expectedDocCount.put(datesForHourInterval.get(7).withMinuteOfHour(0), 3); + final Map expectedDocCount = new HashMap<>(); + expectedDocCount.put(datesForHourInterval.get(0).withMinute(0), 2); + expectedDocCount.put(datesForHourInterval.get(2).withMinute(0), 1); + expectedDocCount.put(datesForHourInterval.get(3).withMinute(0), 1); + expectedDocCount.put(datesForHourInterval.get(4).withMinute(0), 2); + expectedDocCount.put(datesForHourInterval.get(6).withMinute(0), 1); + 
expectedDocCount.put(datesForHourInterval.get(7).withMinute(0), 3); final List buckets = histogram.getBuckets(); assertEquals(8, buckets.size()); buckets.forEach(bucket -> @@ -400,10 +405,10 @@ public void testIntervalHour() throws IOException { testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval, aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), histogram -> { - final Map expectedDocCount = new HashMap<>(); - expectedDocCount.put(datesForHourInterval.get(0).withMinuteOfHour(0), 3); - expectedDocCount.put(datesForHourInterval.get(0).plusHours(3).withMinuteOfHour(0), 3); - expectedDocCount.put(datesForHourInterval.get(0).plusHours(6).withMinuteOfHour(0), 4); + final Map expectedDocCount = new HashMap<>(); + expectedDocCount.put(datesForHourInterval.get(0).withMinute(0), 3); + expectedDocCount.put(datesForHourInterval.get(0).plusHours(3).withMinute(0), 3); + expectedDocCount.put(datesForHourInterval.get(0).plusHours(6).withMinute(0), 4); final List buckets = histogram.getBuckets(); assertEquals(expectedDocCount.size(), buckets.size()); buckets.forEach(bucket -> @@ -413,22 +418,23 @@ public void testIntervalHour() throws IOException { } public void testIntervalHourWithTZ() throws IOException { - final List datesForHourInterval = Arrays.asList( - new DateTime(2017, 2, 1, 9, 2, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 9, 35, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 10, 15, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 13, 6, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 14, 4, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 14, 5, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 15, 59, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 16, 6, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 16, 48, 0, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 16, 59, 0, DateTimeZone.UTC)); + final List datesForHourInterval = Arrays.asList( + ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC)); testSearchCase(DEFAULT_QUERY, datesForHourInterval, - aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> { final List dateStrings = datesForHourInterval.stream() - .map(dateTime -> dateTime.withZone(DateTimeZone.forOffsetHours(-1)).toString()).collect(Collectors.toList()); + .map(dateTime -> DateFormatter.forPattern("strict_date_time") + .format(dateTime.withZoneSameInstant(ZoneOffset.ofHours(-1)))).collect(Collectors.toList()); final List buckets = histogram.getBuckets(); assertEquals(datesForHourInterval.size(), buckets.size()); for (int i = 0; i < buckets.size(); i++) { @@ -439,7 +445,7 @@ public void testIntervalHourWithTZ() throws IOException { } ); testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval, - aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), + aggregation -> 
aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> { final Map expectedDocCount = new HashMap<>(); expectedDocCount.put("2017-02-01T08:00:00.000-01:00", 2); @@ -458,10 +464,10 @@ public void testIntervalHourWithTZ() throws IOException { public void testRandomSecondIntervals() throws IOException { final int length = 120; - final List dataset = new ArrayList<>(length); - final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, 0, DateTimeZone.UTC); + final List dataset = new ArrayList<>(length); + final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); for (int i = 0; i < length; i++) { - final DateTime date = startDate.plusSeconds(i); + final ZonedDateTime date = startDate.plusSeconds(i); dataset.add(date); } final Map bucketsToExpectedDocCountMap = new HashMap<>(); @@ -487,10 +493,10 @@ public void testRandomSecondIntervals() throws IOException { public void testRandomMinuteIntervals() throws IOException { final int length = 120; - final List dataset = new ArrayList<>(length); - final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC); + final List dataset = new ArrayList<>(length); + final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); for (int i = 0; i < length; i++) { - final DateTime date = startDate.plusMinutes(i); + final ZonedDateTime date = startDate.plusMinutes(i); dataset.add(date); } final Map bucketsToExpectedDocCountMap = new HashMap<>(); @@ -516,10 +522,10 @@ public void testRandomMinuteIntervals() throws IOException { public void testRandomHourIntervals() throws IOException { final int length = 72; - final List dataset = new ArrayList<>(length); - final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC); + final List dataset = new ArrayList<>(length); + final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); for (int i = 0; i < length; i++) { - final DateTime date = startDate.plusHours(i); + final ZonedDateTime date = startDate.plusHours(i); dataset.add(date); } final Map bucketsToExpectedDocCountMap = new HashMap<>(); @@ -544,10 +550,10 @@ public void testRandomHourIntervals() throws IOException { public void testRandomDayIntervals() throws IOException { final int length = 140; - final List dataset = new ArrayList<>(length); - final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC); + final List dataset = new ArrayList<>(length); + final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); for (int i = 0; i < length; i++) { - final DateTime date = startDate.plusDays(i); + final ZonedDateTime date = startDate.plusDays(i); dataset.add(date); } final int randomChoice = randomIntBetween(1, 3); @@ -583,17 +589,17 @@ public void testRandomDayIntervals() throws IOException { final int randomIndex = randomInt(2); final Histogram.Bucket bucket = buckets.get(randomIndex); assertEquals(startDate.plusMonths(randomIndex), bucket.getKey()); - assertEquals(startDate.plusMonths(randomIndex).dayOfMonth().getMaximumValue(), bucket.getDocCount()); + assertEquals(YearMonth.from(startDate.plusMonths(randomIndex)).lengthOfMonth(), bucket.getDocCount()); }); } } public void testRandomMonthIntervals() throws IOException { final int length = 60; - final List dataset = new ArrayList<>(length); - final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC); + final List dataset = new ArrayList<>(length); + final ZonedDateTime startDate = 
ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); for (int i = 0; i < length; i++) { - final DateTime date = startDate.plusMonths(i); + final ZonedDateTime date = startDate.plusMonths(i); dataset.add(date); } final Map bucketsToExpectedDocCountMap = new HashMap<>(); @@ -617,10 +623,10 @@ public void testRandomMonthIntervals() throws IOException { public void testRandomYearIntervals() throws IOException { final int length = 300; - final List dataset = new ArrayList<>(length); - final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC); + final List dataset = new ArrayList<>(length); + final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); for (int i = 0; i < length; i++) { - final DateTime date = startDate.plusYears(i); + final ZonedDateTime date = startDate.plusYears(i); dataset.add(date); } final Map bucketsToExpectedDocCountMap = new HashMap<>(); @@ -646,12 +652,12 @@ public void testRandomYearIntervals() throws IOException { } public void testIntervalMinute() throws IOException { - final List datesForMinuteInterval = Arrays.asList( - new DateTime(2017, 2, 1, 9, 2, 35, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 9, 2, 59, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 9, 15, 37, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 9, 16, 4, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 9, 16, 42, DateTimeZone.UTC)); + final List datesForMinuteInterval = Arrays.asList( + ZonedDateTime.of(2017, 2, 1, 9, 2, 35, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 9, 2, 59, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 9, 15, 37, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 9, 16, 4, 0, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 9, 16, 42, 0, ZoneOffset.UTC)); testSearchCase(DEFAULT_QUERY, datesForMinuteInterval, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), @@ -668,10 +674,10 @@ public void testIntervalMinute() throws IOException { testSearchAndReduceCase(DEFAULT_QUERY, datesForMinuteInterval, aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), histogram -> { - final Map expectedDocCount = new HashMap<>(); - expectedDocCount.put(datesForMinuteInterval.get(0).withSecondOfMinute(0), 2); - expectedDocCount.put(datesForMinuteInterval.get(2).withSecondOfMinute(0), 1); - expectedDocCount.put(datesForMinuteInterval.get(3).withSecondOfMinute(0), 2); + final Map expectedDocCount = new HashMap<>(); + expectedDocCount.put(datesForMinuteInterval.get(0).withSecond(0), 2); + expectedDocCount.put(datesForMinuteInterval.get(2).withSecond(0), 1); + expectedDocCount.put(datesForMinuteInterval.get(3).withSecond(0), 2); final List buckets = histogram.getBuckets(); assertEquals(15, buckets.size()); buckets.forEach(bucket -> @@ -681,15 +687,15 @@ public void testIntervalMinute() throws IOException { } public void testIntervalSecond() throws IOException { - final List datesForSecondInterval = Arrays.asList( - new DateTime(2017, 2, 1, 0, 0, 5, 15, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 0, 0, 7, 299, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 0, 0, 7, 74, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 0, 0, 11, 688, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 0, 0, 11, 210, DateTimeZone.UTC), - new DateTime(2017, 2, 1, 0, 0, 11, 380, DateTimeZone.UTC)); - final DateTime startDate = datesForSecondInterval.get(0).withMillisOfSecond(0); - final Map expectedDocCount = new HashMap<>(); + final List datesForSecondInterval = Arrays.asList( + ZonedDateTime.of(2017, 2, 1, 0, 0, 5, 15, ZoneOffset.UTC), + 
ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 299, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 74, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 688, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 210, ZoneOffset.UTC), + ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 380, ZoneOffset.UTC)); + final ZonedDateTime startDate = datesForSecondInterval.get(0).withNano(0); + final Map expectedDocCount = new HashMap<>(); expectedDocCount.put(startDate, 1); expectedDocCount.put(startDate.plusSeconds(2), 2); expectedDocCount.put(startDate.plusSeconds(6), 3); @@ -712,19 +718,19 @@ public void testIntervalSecond() throws IOException { ); } - private void testSearchCase(final Query query, final List dataset, + private void testSearchCase(final Query query, final List dataset, final Consumer configure, final Consumer verify) throws IOException { executeTestCase(false, query, dataset, configure, verify); } - private void testSearchAndReduceCase(final Query query, final List dataset, + private void testSearchAndReduceCase(final Query query, final List dataset, final Consumer configure, final Consumer verify) throws IOException { executeTestCase(true, query, dataset, configure, verify); } - private void testBothCases(final Query query, final List dataset, + private void testBothCases(final Query query, final List dataset, final Consumer configure, final Consumer verify) throws IOException { executeTestCase(false, query, dataset, configure, verify); @@ -745,18 +751,18 @@ protected IndexSettings createIndexSettings() { ); } - private void executeTestCase(final boolean reduced, final Query query, final List dataset, + private void executeTestCase(final boolean reduced, final Query query, final List dataset, final Consumer configure, final Consumer verify) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { final Document document = new Document(); - for (final DateTime date : dataset) { + for (final ZonedDateTime date : dataset) { if (frequently()) { indexWriter.commit(); } - final long instant = date.getMillis(); + final long instant = date.toInstant().toEpochMilli(); document.add(new SortedNumericDocValuesField(DATE_FIELD, instant)); document.add(new LongPoint(INSTANT_FIELD, instant)); indexWriter.addDocument(document); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index c1b9396664a22..2fbf60a3ddccb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -474,6 +475,6 @@ private void executeTestCase(boolean reduced, Query query, List dataset, } private static long asLong(String dateTime) { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis(); + return 
DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli(); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java index 5148b0b85754f..c65b21ef72d32 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramTests.java @@ -31,9 +31,10 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.BucketOrder; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.List; @@ -137,7 +138,7 @@ private static Document documentForDate(String field, long millis) { } public void testRewriteTimeZone() throws IOException { - DateFormatter format = DateFormatters.forPattern("strict_date_optional_time"); + DateFormatter format = DateFormatter.forPattern("strict_date_optional_time"); try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig())) { @@ -166,15 +167,15 @@ public void testRewriteTimeZone() throws IOException { assertNull(builder.rewriteTimeZone(shardContextThatCrosses)); // fixed timeZone => no rewrite - DateTimeZone tz = DateTimeZone.forOffsetHours(1); + ZoneId tz = ZoneOffset.ofHours(1); builder.timeZone(tz); assertSame(tz, builder.rewriteTimeZone(shardContextThatDoesntCross)); assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses)); // daylight-saving-times => rewrite if doesn't cross - tz = DateTimeZone.forID("Europe/Paris"); + tz = ZoneId.of("Europe/Paris"); builder.timeZone(tz); - assertEquals(DateTimeZone.forOffsetHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross)); + assertEquals(ZoneOffset.ofHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross)); assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses)); // Rounded values are no longer all within the same transitions => no rewrite @@ -187,7 +188,7 @@ public void testRewriteTimeZone() throws IOException { builder.timeZone(tz); builder.interval(1000L * 60 * 60 * 24); // ~ 1 day - assertEquals(DateTimeZone.forOffsetHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross)); + assertEquals(ZoneOffset.ofHours(1), builder.rewriteTimeZone(shardContextThatDoesntCross)); assertSame(tz, builder.rewriteTimeZone(shardContextThatCrosses)); // Because the interval is large, rounded values are not diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java index 486f78778c452..0980bb7cf97ec 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java @@ -36,13 +36,9 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; -import org.joda.time.Instant; -import org.joda.time.format.DateTimeFormatter; -import 
org.joda.time.format.ISODateTimeFormat; import java.io.IOException; -import java.util.Locale; +import java.time.ZoneOffset; import static java.lang.Math.max; import static java.lang.Math.min; @@ -66,17 +62,19 @@ public static ExtendedBounds randomExtendedBounds() { * Construct a random {@link ExtendedBounds} in pre-parsed form. */ public static ExtendedBounds randomParsedExtendedBounds() { + long maxDateValue = 253402300799999L; // end of year 9999 + long minDateValue = -377705116800000L; // beginning of year -9999 if (randomBoolean()) { // Construct with one missing bound if (randomBoolean()) { - return new ExtendedBounds(null, randomLong()); + return new ExtendedBounds(null, maxDateValue); } - return new ExtendedBounds(randomLong(), null); + return new ExtendedBounds(minDateValue, null); } - long a = randomLong(); + long a = randomLongBetween(minDateValue, maxDateValue); long b; do { - b = randomLong(); + b = randomLongBetween(minDateValue, maxDateValue); } while (a == b); long min = min(a, b); long max = max(a, b); @@ -88,9 +86,9 @@ public static ExtendedBounds randomParsedExtendedBounds() { */ public static ExtendedBounds unparsed(ExtendedBounds template) { // It'd probably be better to randomize the formatter - DateTimeFormatter formatter = ISODateTimeFormat.dateTime().withLocale(Locale.ROOT).withZone(DateTimeZone.UTC); - String minAsStr = template.getMin() == null ? null : formatter.print(new Instant(template.getMin())); - String maxAsStr = template.getMax() == null ? null : formatter.print(new Instant(template.getMax())); + DateFormatter formatter = DateFormatter.forPattern("strict_date_time").withZone(ZoneOffset.UTC); + String minAsStr = template.getMin() == null ? null : formatter.formatMillis(template.getMin()); + String maxAsStr = template.getMax() == null ? 
null : formatter.formatMillis(template.getMax()); return new ExtendedBounds(minAsStr, maxAsStr); } @@ -104,7 +102,7 @@ public void testParseAndValidate() { null, xContentRegistry(), writableRegistry(), null, null, () -> now, null); when(context.getQueryShardContext()).thenReturn(qsc); DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime"); - DocValueFormat format = new DocValueFormat.DateTime(formatter, DateTimeZone.UTC); + DocValueFormat format = new DocValueFormat.DateTime(formatter, ZoneOffset.UTC); ExtendedBounds expected = randomParsedExtendedBounds(); ExtendedBounds parsed = unparsed(expected).parseAndValidate("test", context, format); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index dd3425c20f43c..fe5c967f54be8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.search.aggregations.bucket.histogram; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.rounding.DateTimeUnit; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; @@ -28,12 +28,12 @@ import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram.BucketInfo; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import java.time.Instant; import java.time.OffsetDateTime; +import java.time.ZoneId; import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -89,16 +89,16 @@ protected InternalAutoDateHistogram createTestInstance(String name, */ public void testGetAppropriateRoundingUsesCorrectIntervals() { RoundingInfo[] roundings = new RoundingInfo[6]; - DateTimeZone timeZone = DateTimeZone.UTC; + ZoneId timeZone = ZoneOffset.UTC; // Since we pass 0 as the starting index to getAppropriateRounding, we'll also use // an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval() // will be larger than the estimate. 
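The hunks in this patch apply one mechanical Joda-to-java.time translation over and over. As a minimal, JDK-only sketch of the recurring equivalences (the class and variable names below are illustrative, not part of the patch):

    import java.time.LocalDate;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;

    class JodaToJavaTimeCheatSheet {
        static void examples() {
            // Joda's new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC) becomes an explicit
            // seven-int call; the two trailing zeros are second and nano-of-second.
            ZonedDateTime start = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);

            // Joda's dateTime.getMillis() becomes a trip through Instant.
            long millis = start.toInstant().toEpochMilli();

            // Joda's withMinuteOfHour(0) / withSecondOfMinute(0) become withMinute(0) / withSecond(0).
            ZonedDateTime topOfHour = start.withMinute(0).withSecond(0);

            // Joda's new DateTime(DateTimeZone.UTC).withDate(2015, 1, 1).getMillis()
            // becomes LocalDate plus an explicit start-of-day conversion in a zone.
            long dayStart = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();

            // Joda's DateTimeZone.forOffsetHours(1) becomes ZoneOffset.ofHours(1).
            ZoneOffset plusOne = ZoneOffset.ofHours(1);

            // Caution: Joda's eight-int constructor takes millis-of-second, while
            // ZonedDateTime.of takes nano-of-second, so a Joda literal 15 would need to
            // become 15_000_000 to denote the same instant.
            ZonedDateTime withMillis = ZonedDateTime.of(2017, 2, 1, 0, 0, 5, 15_000_000, ZoneOffset.UTC);
        }
    }

The nano-of-second caution applies to the testIntervalSecond hunk above, where Joda millisecond literals are carried over verbatim; those tests are unaffected because they truncate with withNano(0) and bucket at second granularity.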
- roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone), - 1000L, "s", 1000); - roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone), - 60 * 1000L, "m", 1, 5, 10, 30); - roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone), - 60 * 60 * 1000L, "h", 1, 3, 12); + roundings[0] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone), + 1000L, "s", 1000); + roundings[1] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone), + 60 * 1000L, "m", 1, 5, 10, 30); + roundings[2] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone), + 60 * 60 * 1000L, "h", 1, 3, 12); OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC); // We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function @@ -117,7 +117,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> ... if (roundedBucketKey >= keyForBucket && roundedBucketKey < keyForBucket + intervalInMillis) { @@ -194,7 +194,7 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> ... Map<Long, Long> actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { - actualCounts.compute(((DateTime) bucket.getKey()).getMillis(), + actualCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(), (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); } assertEquals(expectedCounts, actualCounts); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java index dd5d06f8785f7..961a05a7c40fd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.search.aggregations.bucket.histogram; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.BucketOrder; @@ -28,8 +28,8 @@ import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; -import org.joda.time.DateTime; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -104,7 +104,7 @@ protected void assertReduced(InternalDateHistogram reduced, List<InternalDateHistogram> ... Map<Long, Long> expectedCounts = new TreeMap<>(); for (Histogram histogram : inputs) { for (Histogram.Bucket bucket : histogram.getBuckets()) { - expectedCounts.compute(((DateTime) bucket.getKey()).getMillis(), + expectedCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(), (key, oldValue) -> (oldValue == null ?
0 : oldValue) + bucket.getDocCount()); } } @@ -139,7 +139,7 @@ protected void assertReduced(InternalDateHistogram reduced, List actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { - actualCounts.compute(((DateTime) bucket.getKey()).getMillis(), + actualCounts.compute(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli(), (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); } assertEquals(expectedCounts, actualCounts); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java index d0027208b104b..78c4d18218eea 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java @@ -41,9 +41,9 @@ import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregator; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneOffset; import java.util.Arrays; import java.util.Collections; import java.util.function.Consumer; @@ -260,7 +260,7 @@ public void testWeightSetTimezone() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("value_field").build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder() .setFieldName("weight_field") - .setTimeZone(DateTimeZone.UTC) + .setTimeZone(ZoneOffset.UTC) .build(); WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") .value(valueConfig) @@ -283,7 +283,7 @@ public void testWeightSetTimezone() throws IOException { public void testValueSetTimezone() throws IOException { MultiValuesSourceFieldConfig valueConfig = new MultiValuesSourceFieldConfig.Builder() .setFieldName("value_field") - .setTimeZone(DateTimeZone.UTC) + .setTimeZone(ZoneOffset.UTC) .build(); MultiValuesSourceFieldConfig weightConfig = new MultiValuesSourceFieldConfig.Builder().setFieldName("weight_field").build(); WeightedAvgAggregationBuilder aggregationBuilder = new WeightedAvgAggregationBuilder("_name") diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index 59af941812175..3ed1a15603e84 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -40,9 +41,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalAvg; -import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregator; import java.io.IOException; import java.util.ArrayList; @@ -141,8 +139,7 @@ public void testSameAggNames() throws IOException { } } private static long asLong(String dateTime) { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis(); + return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli(); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java index 7cb4371354c3b..22a4fdbdf67bf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java @@ -31,9 +31,9 @@ import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; -import org.joda.time.DateTime; import java.io.IOException; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -132,10 +132,10 @@ public void testEmptyBucketSort() { assertThat(histogram, notNullValue()); // These become our baseline List<? extends Histogram.Bucket> timeBuckets = histogram.getBuckets(); - DateTime previousKey = (DateTime) timeBuckets.get(0).getKey(); + ZonedDateTime previousKey = (ZonedDateTime) timeBuckets.get(0).getKey(); for (Histogram.Bucket timeBucket : timeBuckets) { - assertThat(previousKey, lessThanOrEqualTo((DateTime) timeBucket.getKey())); - previousKey = (DateTime) timeBucket.getKey(); + assertThat(previousKey, lessThanOrEqualTo((ZonedDateTime) timeBucket.getKey())); + previousKey = (ZonedDateTime) timeBucket.getKey(); } // Now let's test using size diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index b1eec2b0f48ca..4b23304e642c0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -365,6 +366,6 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume } private static long asLong(String dateTime) { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis(); + return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli(); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java index 62fe7a2a45a60..db1ee6ab18916 100644 ---
a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -21,7 +21,8 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -31,12 +32,14 @@ import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matcher; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; import org.junit.After; import java.io.IOException; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -60,19 +63,11 @@ public class DateDerivativeIT extends ESIntegTestCase { private static final String IDX_DST_END = "idx_dst_end"; private static final String IDX_DST_KATHMANDU = "idx_dst_kathmandu"; - private DateTime date(int month, int day) { - return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC); + private ZonedDateTime date(int month, int day) { + return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC); } - private DateTime date(String date) { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date); - } - - private static String format(DateTime date, String pattern) { - return DateTimeFormat.forPattern(pattern).print(date); - } - - private static IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception { + private static IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception { return client().prepareIndex(idx, "type").setSource( jsonBuilder().startObject().timeField("date", date).field("value", value).endObject()); } @@ -124,27 +119,27 @@ public void testSingleValuedField() throws Exception { List buckets = deriv.getBuckets(); assertThat(buckets.size(), equalTo(3)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, nullValue()); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, notNullValue()); assertThat(docCountDeriv.value(), equalTo(1d)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, 
ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, notNullValue()); @@ -166,28 +161,28 @@ public void testSingleValuedFieldNormalised() throws Exception { List buckets = deriv.getBuckets(); assertThat(buckets.size(), equalTo(3)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); Derivative docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, nullValue()); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, notNullValue()); assertThat(docCountDeriv.value(), closeTo(1d, 0.00001)); assertThat(docCountDeriv.normalizedValue(), closeTo(1d / 31d, 0.00001)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, notNullValue()); @@ -202,11 +197,14 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep createIndex(IDX_DST_START); List builders = new ArrayList<>(); - DateTimeZone timezone = DateTimeZone.forID("CET"); - addNTimes(1, IDX_DST_START, new DateTime("2012-03-24T01:00:00", timezone), builders); - addNTimes(2, IDX_DST_START, new DateTime("2012-03-25T01:00:00", timezone), builders); // day with dst shift, only 23h long - addNTimes(3, IDX_DST_START, new DateTime("2012-03-26T01:00:00", timezone), builders); - addNTimes(4, IDX_DST_START, new DateTime("2012-03-27T01:00:00", timezone), builders); + ZoneId timezone = ZoneId.of("CET"); + DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone); + // epoch millis: 1332547200000 + addNTimes(1, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-24T01:00:00")), builders); + // day with dst shift, only 23h long + addNTimes(2, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-25T01:00:00")), builders); + addNTimes(3, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-26T01:00:00")), builders); + addNTimes(4, IDX_DST_START, DateFormatters.toZonedDateTime(formatter.parse("2012-03-27T01:00:00")), builders); indexRandom(true, builders); ensureSearchable(); @@ -225,11 +223,23 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep List buckets = deriv.getBuckets(); assertThat(buckets.size(), equalTo(4)); - assertBucket(buckets.get(0), new DateTime("2012-03-24", 
timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null); - assertBucket(buckets.get(1), new DateTime("2012-03-25", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 24d); + DateFormatter dateFormatter = DateFormatter.forPattern("yyyy-MM-dd"); + ZonedDateTime expectedKeyFirstBucket = + LocalDate.from(dateFormatter.parse("2012-03-24")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); + + ZonedDateTime expectedKeySecondBucket = + LocalDate.from(dateFormatter.parse("2012-03-25")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(1), expectedKeySecondBucket,2L, notNullValue(), 1d, 1d / 24d); + // the following is normalized using a 23h bucket width - assertBucket(buckets.get(2), new DateTime("2012-03-26", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 23d); - assertBucket(buckets.get(3), new DateTime("2012-03-27", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 24d); + ZonedDateTime expectedKeyThirdBucket = + LocalDate.from(dateFormatter.parse("2012-03-26")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 23d); + + ZonedDateTime expectedKeyFourthBucket = + LocalDate.from(dateFormatter.parse("2012-03-27")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d); } /** @@ -237,13 +247,15 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep */ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Exception { createIndex(IDX_DST_END); - DateTimeZone timezone = DateTimeZone.forID("CET"); + ZoneId timezone = ZoneId.of("CET"); List builders = new ArrayList<>(); - addNTimes(1, IDX_DST_END, new DateTime("2012-10-27T01:00:00", timezone), builders); - addNTimes(2, IDX_DST_END, new DateTime("2012-10-28T01:00:00", timezone), builders); // day with dst shift -1h, 25h long - addNTimes(3, IDX_DST_END, new DateTime("2012-10-29T01:00:00", timezone), builders); - addNTimes(4, IDX_DST_END, new DateTime("2012-10-30T01:00:00", timezone), builders); + DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone); + addNTimes(1, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-27T01:00:00")), builders); + // day with dst shift -1h, 25h long + addNTimes(2, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-28T01:00:00")), builders); + addNTimes(3, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-29T01:00:00")), builders); + addNTimes(4, IDX_DST_END, DateFormatters.toZonedDateTime(formatter.parse("2012-10-30T01:00:00")), builders); indexRandom(true, builders); ensureSearchable(); @@ -262,11 +274,24 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Excepti List buckets = deriv.getBuckets(); assertThat(buckets.size(), equalTo(4)); - assertBucket(buckets.get(0), new DateTime("2012-10-27", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, null); - assertBucket(buckets.get(1), new DateTime("2012-10-28", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, 1d / 24d); + DateFormatter dateFormatter = DateFormatter.forPattern("yyyy-MM-dd").withZone(ZoneOffset.UTC); + + ZonedDateTime expectedKeyFirstBucket = + 
LocalDate.from(dateFormatter.parse("2012-10-27")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); + + ZonedDateTime expectedKeySecondBucket = + LocalDate.from(dateFormatter.parse("2012-10-28")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d); + // the following is normalized using a 25h bucket width - assertBucket(buckets.get(2), new DateTime("2012-10-29", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, 1d / 25d); - assertBucket(buckets.get(3), new DateTime("2012-10-30", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, 1d / 24d); + ZonedDateTime expectedKeyThirdBucket = + LocalDate.from(dateFormatter.parse("2012-10-29")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 25d); + + ZonedDateTime expectedKeyFourthBucket = + LocalDate.from(dateFormatter.parse("2012-10-30")).atStartOfDay(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d); } /** @@ -275,14 +300,15 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Excepti */ public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exception { createIndex(IDX_DST_KATHMANDU); - DateTimeZone timezone = DateTimeZone.forID("Asia/Kathmandu"); + ZoneId timezone = ZoneId.of("Asia/Kathmandu"); List builders = new ArrayList<>(); - addNTimes(1, IDX_DST_KATHMANDU, new DateTime("1985-12-31T22:30:00", timezone), builders); + DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(timezone); + addNTimes(1, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1985-12-31T22:30:00")), builders); // the shift happens during the next bucket, which includes the 45min that do not start on the full hour - addNTimes(2, IDX_DST_KATHMANDU, new DateTime("1985-12-31T23:30:00", timezone), builders); - addNTimes(3, IDX_DST_KATHMANDU, new DateTime("1986-01-01T01:30:00", timezone), builders); - addNTimes(4, IDX_DST_KATHMANDU, new DateTime("1986-01-01T02:30:00", timezone), builders); + addNTimes(2, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1985-12-31T23:30:00")), builders); + addNTimes(3, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1986-01-01T01:30:00")), builders); + addNTimes(4, IDX_DST_KATHMANDU, DateFormatters.toZonedDateTime(formatter.parse("1986-01-01T02:30:00")), builders); indexRandom(true, builders); ensureSearchable(); @@ -301,27 +327,36 @@ public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exce List buckets = deriv.getBuckets(); assertThat(buckets.size(), equalTo(4)); - assertBucket(buckets.get(0), new DateTime("1985-12-31T22:00:00", timezone).toDateTime(DateTimeZone.UTC), 1L, nullValue(), null, - null); - assertBucket(buckets.get(1), new DateTime("1985-12-31T23:00:00", timezone).toDateTime(DateTimeZone.UTC), 2L, notNullValue(), 1d, - 1d / 60d); + DateFormatter dateFormatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(ZoneOffset.UTC); + + ZonedDateTime expectedKeyFirstBucket = + LocalDateTime.from(dateFormatter.parse("1985-12-31T22:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null,null); + + 
ZonedDateTime expectedKeySecondBucket = + LocalDateTime.from(dateFormatter.parse("1985-12-31T23:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d,1d / 60d); + // the following is normalized using a 105min bucket width - assertBucket(buckets.get(2), new DateTime("1986-01-01T01:00:00", timezone).toDateTime(DateTimeZone.UTC), 3L, notNullValue(), 1d, - 1d / 105d); - assertBucket(buckets.get(3), new DateTime("1986-01-01T02:00:00", timezone).toDateTime(DateTimeZone.UTC), 4L, notNullValue(), 1d, - 1d / 60d); + ZonedDateTime expectedKeyThirdBucket = + LocalDateTime.from(dateFormatter.parse("1986-01-01T01:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d,1d / 105d); + + ZonedDateTime expectedKeyFourthBucket = + LocalDateTime.from(dateFormatter.parse("1986-01-01T02:00:00")).atZone(timezone).withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d,1d / 60d); } - private static void addNTimes(int amount, String index, DateTime dateTime, List builders) throws Exception { + private static void addNTimes(int amount, String index, ZonedDateTime dateTime, List builders) throws Exception { for (int i = 0; i < amount; i++) { builders.add(indexDoc(index, dateTime, 1)); } } - private static void assertBucket(Histogram.Bucket bucket, DateTime expectedKey, long expectedDocCount, + private static void assertBucket(Histogram.Bucket bucket, ZonedDateTime expectedKey, long expectedDocCount, Matcher derivativeMatcher, Double derivative, Double normalizedDerivative) { assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(expectedKey)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(expectedKey)); assertThat(bucket.getDocCount(), equalTo(expectedDocCount)); Derivative docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, derivativeMatcher); @@ -350,10 +385,10 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)histo).getProperty("_count"); Object[] propertiesCounts = (Object[]) ((InternalAggregation)histo).getProperty("sum.value"); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); @@ -361,14 +396,14 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat(sum.getValue(), equalTo(1.0)); SimpleValue deriv = bucket.getAggregations().get("deriv"); assertThat(deriv, nullValue()); - assertThat((DateTime) propertiesKeys[0], equalTo(key)); + assertThat((ZonedDateTime) propertiesKeys[0], equalTo(key)); assertThat((long) propertiesDocCounts[0], equalTo(1L)); assertThat((double) propertiesCounts[0], equalTo(1.0)); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + 
assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); sum = bucket.getAggregations().get("sum"); @@ -379,14 +414,14 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat(deriv.value(), equalTo(4.0)); assertThat(((InternalMultiBucketAggregation.InternalBucket)bucket).getProperty( "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()), equalTo(4.0)); - assertThat((DateTime) propertiesKeys[1], equalTo(key)); + assertThat((ZonedDateTime) propertiesKeys[1], equalTo(key)); assertThat((long) propertiesDocCounts[1], equalTo(2L)); assertThat((double) propertiesCounts[1], equalTo(5.0)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); sum = bucket.getAggregations().get("sum"); @@ -397,7 +432,7 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat(deriv.value(), equalTo(10.0)); assertThat(((InternalMultiBucketAggregation.InternalBucket)bucket).getProperty( "histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()), equalTo(10.0)); - assertThat((DateTime) propertiesKeys[2], equalTo(key)); + assertThat((ZonedDateTime) propertiesKeys[2], equalTo(key)); assertThat((long) propertiesDocCounts[2], equalTo(3L)); assertThat((double) propertiesCounts[2], equalTo(15.0)); } @@ -417,39 +452,39 @@ public void testMultiValuedField() throws Exception { List buckets = deriv.getBuckets(); assertThat(buckets.size(), equalTo(4)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(true)); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, nullValue()); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, notNullValue()); assertThat(docCountDeriv.value(), equalTo(2.0)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(5L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, 
notNullValue()); assertThat(docCountDeriv.value(), equalTo(2.0)); - key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(3); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); docCountDeriv = bucket.getAggregations().get("deriv"); @@ -487,29 +522,29 @@ public void testPartiallyUnmapped() throws Exception { List buckets = deriv.getBuckets(); assertThat(buckets.size(), equalTo(3)); - DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC); + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); Histogram.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(true)); SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, nullValue()); - key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(1); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(2L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); docCountDeriv = bucket.getAggregations().get("deriv"); assertThat(docCountDeriv, notNullValue()); assertThat(docCountDeriv.value(), equalTo(1.0)); - key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC); + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); bucket = buckets.get(2); assertThat(bucket, notNullValue()); - assertThat((DateTime) bucket.getKey(), equalTo(key)); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(3L)); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); docCountDeriv = bucket.getAggregations().get("deriv"); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java index 341364edbcf32..b4ae26d5f13df 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -160,7 +161,7 @@ public double execute(Map params, double[] values) { } private static long asLong(String dateTime) { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis(); + return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli(); } /** diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java index a5a3007818f7f..fd8c0d58caf9c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.Rounding; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.InternalOrder; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java index 5007784a3d9a9..b929f222d94fe 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfigTests.java @@ -23,9 +23,9 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import static org.hamcrest.Matchers.equalTo; @@ -40,7 +40,7 @@ protected MultiValuesSourceFieldConfig doParseInstance(XContentParser parser) th protected MultiValuesSourceFieldConfig createTestInstance() { String field = randomAlphaOfLength(10); Object missing = randomBoolean() ? randomAlphaOfLength(10) : null; - DateTimeZone timeZone = randomBoolean() ? randomDateTimeZone() : null; + ZoneId timeZone = randomBoolean() ? 
randomZone() : null; return new MultiValuesSourceFieldConfig.Builder() .setFieldName(field).setMissing(missing).setScript(null).setTimeZone(timeZone).build(); } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 03e2a8e8248b9..e111abe0d5132 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.fetch.subphase.highlight; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockTokenizer; @@ -35,6 +34,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -2815,9 +2815,11 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { .setSettings(Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2)) .get()); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); - indexRandom(true, client().prepareIndex("index-1", "type", "1").setSource("d", now, "field", "hello world"), - client().prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1), "field", "hello"), - client().prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2), "field", "world")); + DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time"); + indexRandom(true, + client().prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now), "field", "hello world"), + client().prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1)), "field", "hello"), + client().prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2)), "field", "world")); ensureSearchable("index-1"); for (int i = 0; i < 5; i++) { final SearchResponse r1 = client().prepareSearch("index-1") diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 8a90ca0b8ca47..fc69df5987aff 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -28,9 +28,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -557,7 +556,7 @@ public void testStoredFieldsWithoutSource() throws Exception { .field("long_field", 4L) .field("float_field", 5.0f) .field("double_field", 6.0d) - .field("date_field", 
DateFormatters.forPattern("dateOptionalTime").format(date)) + .field("date_field", DateFormatter.forPattern("dateOptionalTime").format(date)) .field("boolean_field", true) .field("binary_field", Base64.getEncoder().encodeToString("testing text".getBytes("UTF-8"))) .endObject()).get(); @@ -589,7 +588,7 @@ public void testStoredFieldsWithoutSource() throws Exception { assertThat(searchHit.getFields().get("long_field").getValue(), equalTo((Object) 4L)); assertThat(searchHit.getFields().get("float_field").getValue(), equalTo((Object) 5.0f)); assertThat(searchHit.getFields().get("double_field").getValue(), equalTo((Object) 6.0d)); - String dateTime = DateFormatters.forPattern("dateOptionalTime").format(date); + String dateTime = DateFormatter.forPattern("dateOptionalTime").format(date); assertThat(searchHit.getFields().get("date_field").getValue(), equalTo((Object) dateTime)); assertThat(searchHit.getFields().get("boolean_field").getValue(), equalTo((Object) Boolean.TRUE)); assertThat(searchHit.getFields().get("binary_field").getValue(), equalTo(new BytesArray("testing text" .getBytes("UTF8")))); @@ -769,7 +768,7 @@ public void testDocValueFields() throws Exception { .field("long_field", 4L) .field("float_field", 5.0f) .field("double_field", 6.0d) - .field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date)) + .field("date_field", DateFormatter.forPattern("dateOptionalTime").format(date)) .field("boolean_field", true) .field("binary_field", new byte[] {42, 100}) .field("ip_field", "::1") @@ -869,7 +868,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0)); assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d)); assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), - equalTo(DateFormatters.forPattern("dateOptionalTime").format(date))); + equalTo(DateFormatter.forPattern("dateOptionalTime").format(date))); assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true)); assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); @@ -898,9 +897,8 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo("4.0")); assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo("5.0")); assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo("6.0")); - // TODO: switch to java date formatter, but will require special casing java 8 as there is a bug with epoch formatting there assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), - equalTo(Joda.forPattern("epoch_millis").format(date))); + equalTo(DateFormatter.forPattern("epoch_millis").format(date))); } public void testScriptFields() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 0c2b0829c5f75..b7d51798ab7df 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -25,9 +25,11 @@ import 
org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.common.Strings; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -50,11 +52,10 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.ISODateTimeFormat; import java.io.IOException; +import java.time.Instant; +import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; @@ -433,14 +434,14 @@ public void testDateRangeInQueryStringWithTimeZone_7880() { "type", "past", "type=date" )); - DateTimeZone timeZone = randomDateTimeZone(); - String now = ISODateTimeFormat.dateTime().print(new DateTime(timeZone)); - logger.info(" --> Using time_zone [{}], now is [{}]", timeZone.getID(), now); + ZoneId timeZone = randomZone(); + String now = DateFormatter.forPattern("strict_date_optional_time").format(Instant.now().atZone(timeZone)); + logger.info(" --> Using time_zone [{}], now is [{}]", timeZone.getId(), now); client().prepareIndex("test", "type", "1").setSource("past", now).get(); refresh(); SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("past:[now-1m/m TO now+1m/m]") - .timeZone(timeZone.getID())).get(); + .timeZone(timeZone.getId())).get(); assertHitCount(searchResponse, 1L); } @@ -1555,21 +1556,20 @@ public void testQueryStringWithSlopAndFields() { } } - public void testDateProvidedAsNumber() throws ExecutionException, InterruptedException { + public void testDateProvidedAsNumber() throws InterruptedException { createIndex("test"); assertAcked(client().admin().indices().preparePutMapping("test").setType("type") - .setSource("field", "type=date,format=epoch_millis").get()); - indexRandom(true, client().prepareIndex("test", "type", "1").setSource("field", -1000000000001L), - client().prepareIndex("test", "type", "2").setSource("field", -1000000000000L), - client().prepareIndex("test", "type", "3").setSource("field", -999999999999L), - client().prepareIndex("test", "type", "4").setSource("field", -1000000000001.0123456789), - client().prepareIndex("test", "type", "5").setSource("field", -1000000000000.0123456789), - client().prepareIndex("test", "type", "6").setSource("field", -999999999999.0123456789)); - - - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").lte(-1000000000000L)).get(), 4); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").lte(-999999999999L)).get(), 6); - + .setSource("field", "type=date,format=epoch_millis").get()); + indexRandom(true, + client().prepareIndex("test", "type", "1").setSource("field", 1000000000001L), + client().prepareIndex("test", "type", "2").setSource("field", 1000000000000L), + client().prepareIndex("test", "type", "3").setSource("field", 999999999999L), + client().prepareIndex("test", "type", "4").setSource("field", 1000000000002L), + 
client().prepareIndex("test", "type", "5").setSource("field", 1000000000003L), + client().prepareIndex("test", "type", "6").setSource("field", 999999999999L)); + + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").gte(1000000000000L)).get(), 4); + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").gte(999999999999L)).get(), 6); } public void testRangeQueryWithTimeZone() throws Exception { @@ -1582,7 +1582,7 @@ public void testRangeQueryWithTimeZone() throws Exception { client().prepareIndex("test", "type1", "3").setSource("date", "2014-01-01T01:00:00", "num", 3), // Now in UTC+1 client().prepareIndex("test", "type1", "4") - .setSource("date", DateTime.now(DateTimeZone.forOffsetHours(1)).getMillis(), "num", 4)); + .setSource("date", Instant.now().atZone(ZoneOffset.ofHours(1)).toInstant().toEpochMilli(), "num", 4)); SearchResponse searchResponse = client().prepareSearch("test") .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00").to("2014-01-01T00:59:00")) @@ -1634,12 +1634,6 @@ public void testRangeQueryWithTimeZone() throws Exception { assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); - // When we use long values, it means we have ms since epoch UTC based so we don't apply any transformation - expectThrows(SearchPhaseExecutionException.class, () -> - client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from(1388534400000L).to(1388537940999L).timeZone("+01:00")) - .get()); - searchResponse = client().prepareSearch("test") .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00")) .get(); @@ -1653,6 +1647,36 @@ public void testRangeQueryWithTimeZone() throws Exception { assertThat(searchResponse.getHits().getAt(0).getId(), is("4")); } + public void testRangeQueryWithLocaleMapping() throws Exception { + assumeTrue("need java 9 for testing ",JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0); + + assertAcked(prepareCreate("test") + .addMapping("type1", jsonBuilder().startObject().startObject("properties").startObject("date_field") + .field("type", "date") + .field("format", "E, d MMM yyyy HH:mm:ss Z") + .field("locale", "de") + .endObject().endObject().endObject())); + + indexRandom(true, + client().prepareIndex("test", "type1", "1").setSource("date_field", "Mi., 06 Dez. 2000 02:55:00 -0800"), + client().prepareIndex("test", "type1", "2").setSource("date_field", "Do., 07 Dez. 2000 02:55:00 -0800") + ); + + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(QueryBuilders.rangeQuery("date_field") + .gte("Di., 05 Dez. 2000 02:55:00 -0800") + .lte("Do., 07 Dez. 2000 00:00:00 -0800")) + .get(); + assertHitCount(searchResponse, 1L); + + searchResponse = client().prepareSearch("test") + .setQuery(QueryBuilders.rangeQuery("date_field") + .gte("Di., 05 Dez. 2000 02:55:00 -0800") + .lte("Fr., 08 Dez. 
2000 00:00:00 -0800")) + .get(); + assertHitCount(searchResponse, 2L); + } + public void testSearchEmptyDoc() { assertAcked(prepareCreate("test").setSettings("{\"index.analysis.analyzer.default.type\":\"keyword\"}", XContentType.JSON)); client().prepareIndex("test", "type1", "1").setSource("{}", XContentType.JSON).get(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java index 22ed586043d83..58107cbc7318a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; +import java.util.Date; import java.util.function.Predicate; import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; @@ -101,4 +102,10 @@ protected boolean assertToXContentEquivalence() { return true; } + /** + * @return a random date between 1970 and ca 2065 + */ + protected Date randomDate() { + return new Date(randomLongBetween(0, 3000000000000L)); + } } diff --git a/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java b/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java index 3f88ce13f697d..476ba8d26bb0a 100644 --- a/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java +++ b/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.nio.file.Path; +import java.time.ZoneOffset; import java.util.UUID; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; @@ -34,9 +35,8 @@ public class TestUtils { public static final String PUBLIC_KEY_RESOURCE = "/public.key"; public static final String PRIVATE_KEY_RESOURCE = "/private.key"; - private static final DateFormatter formatDateTimeFormatter = - DateFormatter.forPattern("yyyy-MM-dd"); - private static final DateMathParser dateMathParser = formatDateTimeFormatter.toDateMathParser(); + private static final DateFormatter dateFormatter = DateFormatter.forPattern("yyyy-MM-dd"); + private static final DateMathParser dateMathParser = dateFormatter.toDateMathParser(); public static String dumpLicense(License license) throws Exception { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); @@ -49,11 +49,11 @@ public static String dumpLicense(License license) throws Exception { } public static String dateMathString(String time, final long now) { - return formatDateTimeFormatter.formatMillis(dateMathParser.parse(time, () -> now)); + return dateFormatter.format(dateMathParser.parse(time, () -> now).atZone(ZoneOffset.UTC)); } public static long dateMath(String time, final long now) { - return dateMathParser.parse(time, () -> now); + return dateMathParser.parse(time, () -> now).toEpochMilli(); } public static LicenseSpec generateRandomLicenseSpec(int version) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DateUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DateUtils.java index 2769b76b65f10..9cec2b4cc7c1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DateUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DateUtils.java @@ -5,39 +5,37 @@ */ package org.elasticsearch.license; +import 
org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.time.DateFormatter; -import org.joda.time.MutableDateTime; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; +import org.elasticsearch.common.time.DateFormatters; import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; public class DateUtils { private static final DateFormatter dateOnlyFormatter = DateFormatter.forPattern("yyyy-MM-dd").withZone(ZoneOffset.UTC); - - private static final DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.dateTime().withZoneUTC(); + private static final DateFormatter dateTimeFormatter = DateFormatter.forPattern("strict_date_time").withZone(ZoneOffset.UTC); public static long endOfTheDay(String date) { try { // Try parsing using complete date/time format - return dateTimeFormatter.parseDateTime(date).getMillis(); - } catch (IllegalArgumentException ex) { - // Fall back to the date only format - MutableDateTime dateTime = new MutableDateTime(dateOnlyFormatter.parseMillis(date)); - dateTime.millisOfDay().set(dateTime.millisOfDay().getMaximumValue()); - return dateTime.getMillis(); + return dateTimeFormatter.parseMillis(date); + } catch (ElasticsearchParseException | IllegalArgumentException ex) { + // Fall back to the date only format; ZonedDateTime is immutable, so reassign the adjusted value + ZonedDateTime dateTime = DateFormatters.toZonedDateTime(dateOnlyFormatter.parse(date)); + dateTime = dateTime.with(ChronoField.MILLI_OF_DAY, ChronoField.MILLI_OF_DAY.range().getMaximum()); + return dateTime.toInstant().toEpochMilli(); } } public static long beginningOfTheDay(String date) { try { // Try parsing using complete date/time format - return dateTimeFormatter.parseDateTime(date).getMillis(); - } catch (IllegalArgumentException ex) { + return dateTimeFormatter.parseMillis(date); + } catch (ElasticsearchParseException | IllegalArgumentException ex) { // Fall back to the date only format - return dateOnlyFormatter.parseMillis(date); + return DateFormatters.toZonedDateTime(dateOnlyFormatter.parse(date)).toInstant().toEpochMilli(); } - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java index ef284e1394205..15531fce7a1ca 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java @@ -91,7 +91,7 @@ static long parseDateOrThrow(String date, ParseField paramName, LongSupplier now DateMathParser dateMathParser = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser(); try { - return dateMathParser.parse(date, now); + return dateMathParser.parse(date, now).toEpochMilli(); } catch (Exception e) { String msg = Messages.getMessage(Messages.REST_INVALID_DATETIME_PARAMS, paramName.getPreferredName(), date); throw new ElasticsearchParseException(msg, e); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index d33280dcac3d2..12faa157eeb9a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -162,7 +162,7 @@ static long parseDateOrThrow(String date, ParseField
paramName, LongSupplier now DateMathParser dateMathParser = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser(); try { - return dateMathParser.parse(date, now); + return dateMathParser.parse(date, now).toEpochMilli(); } catch (Exception e) { String msg = Messages.getMessage(Messages.REST_INVALID_DATETIME_PARAMS, paramName.getPreferredName(), date); throw new ElasticsearchParseException(msg, e); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java index f0ba07ad15c68..bb1faeddd8298 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java @@ -10,7 +10,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.rounding.DateTimeUnit; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -22,9 +22,9 @@ import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneOffset; import java.util.Arrays; import java.util.Collection; import java.util.concurrent.TimeUnit; @@ -128,7 +128,7 @@ public static long getHistogramIntervalMillis(AggregationBuilder histogramAggreg * an {@link ElasticsearchException} with the validation error */ private static long validateAndGetDateHistogramInterval(DateHistogramAggregationBuilder dateHistogram) { - if (dateHistogram.timeZone() != null && dateHistogram.timeZone().equals(DateTimeZone.UTC) == false) { + if (dateHistogram.timeZone() != null && dateHistogram.timeZone().normalized().equals(ZoneOffset.UTC) == false) { throw ExceptionsHelper.badRequestException("ML requires date_histogram.time_zone to be UTC"); } @@ -141,7 +141,7 @@ private static long validateAndGetDateHistogramInterval(DateHistogramAggregation public static long validateAndGetCalendarInterval(String calendarInterval) { TimeValue interval; - DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(calendarInterval); + Rounding.DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(calendarInterval); if (dateTimeUnit != null) { switch (dateTimeUnit) { case WEEK_OF_WEEKYEAR: @@ -161,7 +161,7 @@ public static long validateAndGetCalendarInterval(String calendarInterval) { break; case MONTH_OF_YEAR: case YEAR_OF_CENTURY: - case QUARTER: + case QUARTER_OF_YEAR: throw ExceptionsHelper.badRequestException(invalidDateHistogramCalendarIntervalMessage(calendarInterval)); default: throw ExceptionsHelper.badRequestException("Unexpected dateTimeUnit [" + dateTimeUnit + "]"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java index 6434b0ff2b98d..ea0994dad717c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java 
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/TimeUtils.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.ml.utils.time; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; @@ -15,6 +16,7 @@ import java.util.concurrent.TimeUnit; public final class TimeUtils { + private TimeUtils() { // Do nothing } @@ -55,7 +57,7 @@ public static long dateStringToEpoch(String date) { try { return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date); - } catch (IllegalArgumentException e) { + } catch (ElasticsearchParseException | IllegalArgumentException e) { } // Could not do the conversion return -1; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java index 166322b93722c..f4fee8acc3d1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java @@ -9,12 +9,11 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.rounding.DateTimeUnit; -import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -22,9 +21,9 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.Map; import java.util.Objects; @@ -82,7 +81,7 @@ public DateHistogramGroupConfig(final String field, final DateHistogramInterval * The {@code field} and {@code interval} are required to compute the date histogram for the rolled up documents. * The {@code delay} is optional and can be set to {@code null}. It defines how long to wait before rolling up new documents. * The {@code timeZone} is optional and can be set to {@code null}. When configured, the time zone value is resolved using - * ({@link DateTimeZone#forID(String)} and must match a time zone identifier provided by the Joda Time library. + * {@link ZoneId#of(String)} and must match a time zone identifier. *

* @param field the name of the date field to use for the date histogram (required) * @param interval the interval to use for the date histogram (required) @@ -229,23 +228,14 @@ public static DateHistogramGroupConfig fromXContent(final XContentParser parser) } private static Rounding createRounding(final String expr, final String timeZone) { - DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expr); + Rounding.DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expr); final Rounding.Builder rounding; if (timeUnit != null) { rounding = new Rounding.Builder(timeUnit); } else { rounding = new Rounding.Builder(TimeValue.parseTimeValue(expr, "createRounding")); } - rounding.timeZone(toDateTimeZone(timeZone)); + rounding.timeZone(ZoneId.of(timeZone)); return rounding.build(); } - - private static DateTimeZone toDateTimeZone(final String timezone) { - try { - return DateTimeZone.forOffsetHours(Integer.parseInt(timezone)); - } catch (NumberFormatException e) { - return DateTimeZone.forID(timezone); - } - } - } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java index f81c7955abbc4..e67baeaad3916 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java @@ -91,7 +91,7 @@ public static DateTime parseDateMathOrNull(String fieldName, XContentParser pars } public static DateTime parseDateMath(String valueString, DateTimeZone timeZone, final Clock clock) { - return new DateTime(dateMathParser.parse(valueString, clock::millis), timeZone); + return new DateTime(dateMathParser.parse(valueString, clock::millis).toEpochMilli(), timeZone); } public static DateTime parseDate(String fieldName, XContentParser parser, DateTimeZone timeZone) throws IOException { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java index 6812aca474749..2219a78055544 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/index/engine/RewriteCachingDirectoryReaderTests.java @@ -15,9 +15,9 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneOffset; public class RewriteCachingDirectoryReaderTests extends ESTestCase { @@ -92,15 +92,15 @@ public void testIsWithinQuery() throws IOException { dateFieldType.setName("test"); QueryRewriteContext context = new QueryRewriteContext(xContentRegistry(), writableRegistry(), null, () -> 0); MappedFieldType.Relation relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 0, 10, - true, true, DateTimeZone.UTC, null, context); + true, true, ZoneOffset.UTC, null, context); assertEquals(relation, MappedFieldType.Relation.WITHIN); relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 3, 11, - true, true, DateTimeZone.UTC, null, context); + true, true, ZoneOffset.UTC, null, context); assertEquals(relation, 
MappedFieldType.Relation.INTERSECTS); relation = dateFieldType.isFieldWithinQuery(cachingDirectoryReader, 10, 11, - false, true, DateTimeZone.UTC, null, context); + false, true, ZoneOffset.UTC, null, context); assertEquals(relation, MappedFieldType.Relation.DISJOINT); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java index 230fd75fbb958..47dad7e18eb32 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java @@ -29,6 +29,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.List; import java.util.UUID; @@ -51,11 +52,11 @@ public class TestUtils { private static final DateMathParser dateMathParser = formatDateTimeFormatter.toDateMathParser(); public static String dateMathString(String time, final long now) { - return formatDateTimeFormatter.formatMillis(dateMathParser.parse(time, () -> now)); + return formatDateTimeFormatter.format(dateMathParser.parse(time, () -> now).atZone(ZoneOffset.UTC)); } public static long dateMath(String time, final long now) { - return dateMathParser.parse(time, () -> now); + return dateMathParser.parse(time, () -> now).toEpochMilli(); } public static LicenseSpec generateRandomLicenseSpec(int version) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index cb2f13e804253..788870013885e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -48,15 +48,15 @@ import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig.Mode; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.TimeZone; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -449,7 +449,7 @@ public void testBuild_GivenHistogramWithDefaultInterval() { public void testBuild_GivenDateHistogramWithInvalidTimeZone() { MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time") - .interval(300000L).timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("EST"))).subAggregation(maxTime); + .interval(300000L).timeZone(ZoneId.of("CET")).subAggregation(maxTime); ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> createDatafeedWithDateHistogram(dateHistogram)); @@ -650,7 +650,7 @@ public void testSerializationOfComplexAggs() throws IOException { new Script("params.bytes > 0 ? 
params.bytes : null")); DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("histogram_buckets") - .field("timestamp").interval(300000).timeZone(DateTimeZone.UTC) + .field("timestamp").interval(300000).timeZone(ZoneOffset.UTC) .subAggregation(maxTime) .subAggregation(avgAggregationBuilder) .subAggregation(derivativePipelineAggregationBuilder) @@ -701,7 +701,7 @@ public void testSerializationOfComplexAggsBetweenVersions() throws IOException { new Script("params.bytes > 0 ? params.bytes : null")); DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("histogram_buckets") - .field("timestamp").interval(300000).timeZone(DateTimeZone.UTC) + .field("timestamp").interval(300000).timeZone(ZoneOffset.UTC) .subAggregation(maxTime) .subAggregation(avgAggregationBuilder) .subAggregation(derivativePipelineAggregationBuilder) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java index 7770def0fae9a..532468216e5aa 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java @@ -14,11 +14,12 @@ import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTimeZone; -import java.util.TimeZone; +import java.time.ZoneId; +import java.time.ZoneOffset; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class ExtractorUtilsTests extends ESTestCase { @@ -73,13 +74,21 @@ public void testGetHistogramAggregation_MissingHistogramAgg() { public void testGetHistogramIntervalMillis_GivenDateHistogramWithInvalidTimeZone() { MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time") - .interval(300000L).timeZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("EST"))).subAggregation(maxTime); + .interval(300000L).timeZone(ZoneId.of("CET")).subAggregation(maxTime); ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> ExtractorUtils.getHistogramIntervalMillis(dateHistogram)); assertThat(e.getMessage(), equalTo("ML requires date_histogram.time_zone to be UTC")); } + public void testGetHistogramIntervalMillis_GivenUtcTimeZones() { + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); + ZoneId zone = randomFrom(ZoneOffset.UTC, ZoneId.of("UTC")); + DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram("bucket").field("time") + .interval(300000L).timeZone(zone).subAggregation(maxTime); + assertThat(ExtractorUtils.getHistogramIntervalMillis(dateHistogram), is(300_000L)); + } + public void testIsHistogram() { assertTrue(ExtractorUtils.isHistogram(AggregationBuilders.dateHistogram("time"))); assertTrue(ExtractorUtils.isHistogram(AggregationBuilders.histogram("time"))); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java index d892eb550a17a..605ea6e901a90 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java @@ -28,7 +28,7 @@ import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween; import static com.carrotsearch.randomizedtesting.generators.RandomPicks.randomFrom; import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween; -import static org.elasticsearch.test.ESTestCase.randomDateTimeZone; +import static org.elasticsearch.test.ESTestCase.randomZone; public class ConfigTestHelpers { @@ -71,7 +71,7 @@ public static DateHistogramGroupConfig randomDateHistogramGroupConfig(final Rand final String field = randomField(random); final DateHistogramInterval interval = randomInterval(); final DateHistogramInterval delay = random.nextBoolean() ? randomInterval() : null; - final String timezone = random.nextBoolean() ? randomDateTimeZone().toString() : null; + String timezone = random.nextBoolean() ? randomZone().getId() : null; return new DateHistogramGroupConfig(field, interval, delay, timezone); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java index 415e1a00a60cf..95df682ff5e14 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfigSerializingTests.java @@ -14,9 +14,9 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.ZoneId; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -155,28 +155,28 @@ public void testBwcSerialization() throws IOException { DateHistogramInterval interval = new DateHistogramInterval(in); String field = in.readString(); DateHistogramInterval delay = in.readOptionalWriteable(DateHistogramInterval::new); - DateTimeZone timeZone = in.readTimeZone(); + ZoneId timeZone = in.readZoneId(); - assertEqualInstances(reference, new DateHistogramGroupConfig(field, interval, delay, timeZone.getID())); + assertEqualInstances(reference, new DateHistogramGroupConfig(field, interval, delay, timeZone.getId())); } for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) { final String field = ConfigTestHelpers.randomField(random()); final DateHistogramInterval interval = ConfigTestHelpers.randomInterval(); final DateHistogramInterval delay = randomBoolean() ? 
ConfigTestHelpers.randomInterval() : null; - final DateTimeZone timezone = randomDateTimeZone(); + final ZoneId timezone = randomZone(); // previous way to serialize a DateHistogramGroupConfig final BytesStreamOutput out = new BytesStreamOutput(); interval.writeTo(out); out.writeString(field); out.writeOptionalWriteable(delay); - out.writeTimeZone(timezone); + out.writeZoneId(timezone); final StreamInput in = out.bytes().streamInput(); DateHistogramGroupConfig deserialized = new DateHistogramGroupConfig(in); - assertEqualInstances(new DateHistogramGroupConfig(field, interval, delay, timezone.getID()), deserialized); + assertEqualInstances(new DateHistogramGroupConfig(field, interval, delay, timezone.getId()), deserialized); } } } diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java index 1453f59ed43e4..454a3eb06e5a7 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java +++ b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java @@ -13,8 +13,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.ml.MachineLearning; -import org.joda.time.DateTime; +import java.time.Clock; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; @@ -266,7 +268,7 @@ public void testHRDSplit() throws Exception { "\"time\": { \"type\": \"date\" } }"); // Index some data - DateTime baseTime = new DateTime().minusYears(1); + ZonedDateTime baseTime = ZonedDateTime.now(Clock.systemDefaultZone()).minusYears(1); TestConfiguration test = tests.get(randomInt(tests.size()-1)); // domainSplit() tests had subdomain, testHighestRegisteredDomainCases() did not, so we need a special case for sub @@ -276,18 +278,20 @@ public void testHRDSplit() throws Exception { for (int i = 0; i < 100; i++) { - DateTime time = baseTime.plusHours(i); + ZonedDateTime time = baseTime.plusHours(i); if (i == 64) { // Anomaly has 100 docs, but we don't care about the value for (int j = 0; j < 100; j++) { - Request createDocRequest = new Request("PUT", "/painless/_doc/" + time.toDateTimeISO() + "_" + j); - createDocRequest.setJsonEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.toDateTimeISO() + "\"}"); + String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME); + Request createDocRequest = new Request("PUT", "/painless/_doc/" + formattedTime + "_" + j); + createDocRequest.setJsonEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + formattedTime + "\"}"); client().performRequest(createDocRequest); } } else { // Non-anomalous values will be what's seen when the anomaly is reported - Request createDocRequest = new Request("PUT", "/painless/_doc/" + time.toDateTimeISO()); - createDocRequest.setJsonEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.toDateTimeISO() + "\"}"); + String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME); + Request createDocRequest = new Request("PUT", "/painless/_doc/" + formattedTime); + createDocRequest.setJsonEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + formattedTime + "\"}"); client().performRequest(createDocRequest); } } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java index 190933b1e9316..5b9852ba4fddc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlDailyMaintenanceService.java @@ -16,9 +16,9 @@ import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.DeleteExpiredDataAction; -import org.joda.time.DateTime; -import org.joda.time.chrono.ISOChronology; +import java.time.Clock; +import java.time.ZonedDateTime; import java.util.Objects; import java.util.Random; import java.util.concurrent.ScheduledFuture; @@ -70,9 +70,14 @@ public MlDailyMaintenanceService(ClusterName clusterName, ThreadPool threadPool, private static TimeValue delayToNextTime(ClusterName clusterName) { Random random = new Random(clusterName.hashCode()); int minutesOffset = random.ints(0, MAX_TIME_OFFSET_MINUTES).findFirst().getAsInt(); - DateTime now = DateTime.now(ISOChronology.getInstance()); - DateTime next = now.plusDays(1).withTimeAtStartOfDay().plusMinutes(30).plusMinutes(minutesOffset); - return TimeValue.timeValueMillis(next.getMillis() - now.getMillis()); + + ZonedDateTime now = ZonedDateTime.now(Clock.systemDefaultZone()); + ZonedDateTime next = now.plusDays(1) + .toLocalDate() + .atStartOfDay(now.getZone()) + .plusMinutes(30) + .plusMinutes(minutesOffset); + return TimeValue.timeValueMillis(next.toInstant().toEpochMilli() - now.toInstant().toEpochMilli()); } public void start() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java index 35878f1199586..85f2489e6b0e5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java @@ -128,7 +128,6 @@ Long runLookBack(long startTime, Long endTime) throws Exception { auditor.info(jobId, msg); LOGGER.info("[{}] {}", jobId, msg); - FlushJobAction.Request request = new FlushJobAction.Request(jobId); request.setCalcInterim(true); run(lookbackStartTimeMs, lookbackEnd, request); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java index 86fe439ac16cb..f8fa3b1874808 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java @@ -17,11 +17,11 @@ import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; import org.elasticsearch.xpack.core.ml.action.util.PageParams; import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; -import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.utils.Intervals; -import org.joda.time.DateTime; +import org.elasticsearch.xpack.ml.datafeed.delayeddatacheck.DelayedDataDetectorFactory.BucketWithMissingData; +import 
java.time.ZonedDateTime; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -131,8 +131,8 @@ private Map checkCurrentBucketEventCount(long start, long end) { } private static long toHistogramKeyToEpoch(Object key) { - if (key instanceof DateTime) { - return ((DateTime)key).getMillis(); + if (key instanceof ZonedDateTime) { + return ((ZonedDateTime)key).toInstant().toEpochMilli(); } else if (key instanceof Double) { return ((Double)key).longValue(); } else if (key instanceof Long){ diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java index db8dea22675f2..8cf3ed39651d6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java @@ -21,10 +21,10 @@ import org.elasticsearch.search.aggregations.metrics.Percentiles; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.messages.Messages; -import org.joda.time.DateTime; import java.io.IOException; import java.io.OutputStream; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -210,15 +210,15 @@ private void processDateHistogram(Histogram agg) throws IOException { } /* - * Date Histograms have a {@link DateTime} object as the key, + * Date Histograms have a {@link ZonedDateTime} object as the key, * Histograms have either a Double or Long. */ private long toHistogramKeyToEpoch(Object key) { - if (key instanceof DateTime) { - return ((DateTime)key).getMillis(); + if (key instanceof ZonedDateTime) { + return ((ZonedDateTime)key).toInstant().toEpochMilli(); } else if (key instanceof Double) { return ((Double)key).longValue(); - } else if (key instanceof Long){ + } else if (key instanceof Long) { return (Long)key; } else { throw new IllegalStateException("Histogram key [" + key + "] cannot be converted to a timestamp"); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java index 232cd53a359ce..4223bff49825e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedField.java @@ -8,7 +8,6 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; -import org.joda.time.base.BaseDateTime; import java.util.List; import java.util.Map; @@ -112,8 +111,6 @@ public Object[] value(SearchHit hit) { } if (value[0] instanceof String) { // doc_value field with the epoch_millis format value[0] = Long.parseLong((String) value[0]); - } else if (value[0] instanceof BaseDateTime) { // script field - value[0] = ((BaseDateTime) value[0]).getMillis(); } else if (value[0] instanceof Long == false) { // pre-6.0 field throw new IllegalStateException("Unexpected value for a time field: " + value[0].getClass()); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java index 204ae42720433..dd9a6229ec887 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java @@ -14,8 +14,8 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.results.OverallBucket; import org.elasticsearch.xpack.core.ml.job.results.Result; -import org.joda.time.DateTime; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Date; import java.util.List; @@ -64,8 +64,8 @@ public List computeOverallBuckets(Histogram histogram) { } private static Date getHistogramBucketTimestamp(Histogram.Bucket bucket) { - DateTime bucketTimestamp = (DateTime) bucket.getKey(); - return new Date(bucketTimestamp.getMillis()); + ZonedDateTime bucketTimestamp = (ZonedDateTime) bucket.getKey(); + return new Date(bucketTimestamp.toInstant().toEpochMilli()); } static class TopNScores extends PriorityQueue { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java index b595c564ab9aa..be50114fc46e0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemover.java @@ -15,9 +15,9 @@ import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.ml.job.persistence.BatchedJobsIterator; import org.elasticsearch.xpack.ml.utils.VolatileCursorIterator; -import org.joda.time.DateTime; -import org.joda.time.chrono.ISOChronology; +import java.time.Clock; +import java.time.Instant; import java.util.Deque; import java.util.Iterator; import java.util.List; @@ -71,7 +71,7 @@ private WrappedBatchedJobsIterator newJobIterator() { } private long calcCutoffEpochMs(long retentionDays) { - long nowEpochMs = DateTime.now(ISOChronology.getInstance()).getMillis(); + long nowEpochMs = Instant.now(Clock.systemDefaultZone()).toEpochMilli(); return nowEpochMs - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 3de9795deb335..3225a7eb9212e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -35,11 +35,11 @@ import org.elasticsearch.xpack.core.ml.job.results.ForecastRequestStats; import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.ml.MachineLearning; -import org.joda.time.DateTime; -import org.joda.time.chrono.ISOChronology; import java.io.IOException; import java.io.InputStream; +import java.time.Clock; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -66,7 +66,7 @@ public class 
ExpiredForecastsRemover implements MlDataRemover { public ExpiredForecastsRemover(Client client, ThreadPool threadPool) { this.client = Objects.requireNonNull(client); this.threadPool = Objects.requireNonNull(threadPool); - this.cutoffEpochMs = DateTime.now(ISOChronology.getInstance()).getMillis(); + this.cutoffEpochMs = Instant.now(Clock.systemDefaultZone()).toEpochMilli(); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java index 1e5e6fa652db1..ad999daafb254 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/ExtractedFieldTests.java @@ -8,9 +8,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedField; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; -import org.joda.time.DateTime; import java.util.Arrays; @@ -98,16 +96,16 @@ public void testNewTimeFieldGivenSource() { expectThrows(IllegalArgumentException.class, () -> ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.SOURCE)); } - public void testValueGivenTimeField() { + public void testValueGivenStringTimeField() { final long millis = randomLong(); - final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", new DateTime(millis)).build(); + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build(); final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(timeField.value(hit), equalTo(new Object[] { millis })); } - public void testValueGivenStringTimeField() { + public void testValueGivenLongTimeField() { final long millis = randomLong(); - final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build(); + final SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", millis).build(); final ExtractedField timeField = ExtractedField.newTimeField("time", ExtractedField.ExtractionMethod.DOC_VALUE); assertThat(timeField.value(hit), equalTo(new Object[] { millis })); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java index 1fd6db3de566a..20dd49029b3ea 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/fields/TimeBasedExtractedFieldsTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.ml.test.SearchHitBuilder; -import org.joda.time.DateTime; import java.util.Arrays; import java.util.Collections; @@ -64,13 +63,6 @@ public void testAllTypesOfFields() { assertThat(extractedFields.getSourceFields(), equalTo(new String[] {"src1", "src2"})); } - public void testTimeFieldValue() { - long millis = randomLong(); - SearchHit hit = new 
SearchHitBuilder(randomInt()).addField("time", new DateTime(millis)).build(); - TimeBasedExtractedFields extractedFields = new TimeBasedExtractedFields(timeField, Collections.singletonList(timeField)); - assertThat(extractedFields.timeFieldValue(hit), equalTo(millis)); - } - public void testStringTimeFieldValue() { long millis = randomLong(); SearchHit hit = new SearchHitBuilder(randomInt()).addField("time", Long.toString(millis)).build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java index 6e256680eca55..f6f75fe722dac 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch; import java.util.Arrays; @@ -316,7 +316,7 @@ private void validateJavaTimestampFormats(List javaTimestampFormats, Str String timestampFormat = javaTimestampFormats.get(i); switch (timestampFormat) { case "ISO8601": - parsed = DateFormatters.forPattern("strict_date_optional_time_nanos").withZone(defaultZone).parse(text); + parsed = DateFormatter.forPattern("strict_date_optional_time_nanos").withZone(defaultZone).parse(text); break; default: java.time.format.DateTimeFormatter parser = new java.time.format.DateTimeFormatterBuilder() diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 505a2b871da0b..ee331a99006ed 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -289,11 +289,16 @@ private Bucket createBucket(boolean isInterim) { return bucket; } + private Date randomDate() { + // between 1970 and 2065 + return new Date(randomLongBetween(0, 3000000000000L)); + } + private List createRecords(boolean isInterim) { List records = new ArrayList<>(); int count = randomIntBetween(0, 100); - Date now = new Date(randomNonNegativeLong()); + Date now = randomDate(); for (int i=0; i { @@ -48,7 +47,7 @@ protected AutodetectResult createTestInstance() { FlushAcknowledgement flushAcknowledgement; String jobId = "foo"; if (randomBoolean()) { - bucket = new Bucket(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong()); + bucket = new Bucket(jobId, randomDate(), randomNonNegativeLong()); } else { bucket = null; } @@ -56,7 +55,7 @@ protected AutodetectResult createTestInstance() { int size = randomInt(10); records = new ArrayList<>(size); for (int i = 0; i < size; i++) { - AnomalyRecord record = new AnomalyRecord(jobId, new Date(randomLong()), randomNonNegativeLong()); + AnomalyRecord record = new AnomalyRecord(jobId, randomDate(), randomNonNegativeLong()); record.setProbability(randomDoubleBetween(0.0, 1.0, true)); records.add(record); } @@ -67,7 +66,7 @@ protected AutodetectResult createTestInstance() { 
influencers = new ArrayList<>(size); for (int i = 0; i < size; i++) { Influencer influencer = new Influencer(jobId, randomAlphaOfLength(10), randomAlphaOfLength(10), - new Date(randomNonNegativeLong()), randomNonNegativeLong()); + randomDate(), randomNonNegativeLong()); influencer.setProbability(randomDoubleBetween(0.0, 1.0, true)); influencers.add(influencer); } @@ -89,12 +88,13 @@ protected AutodetectResult createTestInstance() { modelSizeStats = null; } if (randomBoolean()) { - modelPlot = new ModelPlot(jobId, new Date(randomLong()), randomNonNegativeLong(), randomInt()); + modelPlot = new ModelPlot(jobId, randomDate(), randomNonNegativeLong(), randomInt()); } else { modelPlot = null; } if (randomBoolean()) { - forecast = new Forecast(jobId, randomAlphaOfLength(20), new Date(randomLong()), randomNonNegativeLong(), randomInt()); + forecast = new Forecast(jobId, randomAlphaOfLength(20), randomDate(), + randomNonNegativeLong(), randomInt()); } else { forecast = null; } @@ -110,7 +110,8 @@ protected AutodetectResult createTestInstance() { categoryDefinition = null; } if (randomBoolean()) { - flushAcknowledgement = new FlushAcknowledgement(randomAlphaOfLengthBetween(1, 20), new Date(randomNonNegativeLong())); + flushAcknowledgement = new FlushAcknowledgement(randomAlphaOfLengthBetween(1, 20), + randomDate()); } else { flushAcknowledgement = null; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/BucketTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/BucketTests.java index 65343b0a068ac..a49ef0a5e26fa 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/BucketTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/BucketTests.java @@ -33,7 +33,7 @@ public Bucket createTestInstance() { } public Bucket createTestInstance(String jobId) { - Bucket bucket = new Bucket(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong()); + Bucket bucket = new Bucket(jobId, randomDate(), randomNonNegativeLong()); if (randomBoolean()) { bucket.setAnomalyScore(randomDouble()); } @@ -92,7 +92,7 @@ protected Bucket doParseInstance(XContentParser parser) { } public void testEquals_GivenDifferentClass() { - Bucket bucket = new Bucket("foo", new Date(randomLong()), randomNonNegativeLong()); + Bucket bucket = new Bucket("foo", randomDate(), randomNonNegativeLong()); assertFalse(bucket.equals("a string")); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ForecastTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ForecastTests.java index b1d9f37dcb4f2..a5c15716ea293 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ForecastTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ForecastTests.java @@ -26,7 +26,7 @@ protected Forecast createTestInstance() { public Forecast createTestInstance(String jobId) { Forecast forecast = - new Forecast(jobId, randomAlphaOfLength(20), new Date(randomLong()), + new Forecast(jobId, randomAlphaOfLength(20), randomDate(), randomNonNegativeLong(), randomInt()); if (randomBoolean()) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ModelPlotTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ModelPlotTests.java index 2a5ceb8363b8a..37788bfa203d2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ModelPlotTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/ModelPlotTests.java @@ -30,7 +30,7 @@ protected ModelPlot createTestInstance() { public ModelPlot createTestInstance(String jobId) { ModelPlot modelPlot = - new ModelPlot(jobId, new Date(randomLong()), randomNonNegativeLong(), randomInt()); + new ModelPlot(jobId, randomDate(), randomNonNegativeLong(), randomInt()); if (randomBoolean()) { modelPlot.setByFieldName(randomAlphaOfLengthBetween(1, 20)); } @@ -73,14 +73,16 @@ protected ModelPlot doParseInstance(XContentParser parser) { public void testEquals_GivenSameObject() { ModelPlot modelPlot = - new ModelPlot(randomAlphaOfLength(15), new Date(randomLong()), randomNonNegativeLong(), randomInt()); + new ModelPlot(randomAlphaOfLength(15), + randomDate(), randomNonNegativeLong(), randomInt()); assertTrue(modelPlot.equals(modelPlot)); } public void testEquals_GivenObjectOfDifferentClass() { ModelPlot modelPlot = - new ModelPlot(randomAlphaOfLength(15), new Date(randomLong()), randomNonNegativeLong(), randomInt()); + new ModelPlot(randomAlphaOfLength(15), + randomDate(), randomNonNegativeLong(), randomInt()); assertFalse(modelPlot.equals("a string")); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/OverallBucketTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/OverallBucketTests.java index 42b29cb5ee224..b6c0a99685d0b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/OverallBucketTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/OverallBucketTests.java @@ -26,7 +26,7 @@ protected OverallBucket createTestInstance() { for (int i = 0; i < jobCount; ++i) { jobs.add(new OverallBucket.JobInfo(JobTests.randomValidJobId(), randomDoubleBetween(0.0, 100.0, true))); } - return new OverallBucket(new Date(randomNonNegativeLong()), + return new OverallBucket(new Date(randomLongBetween(0, 3000000000000L)), randomIntBetween(60, 24 * 3600), randomDoubleBetween(0.0, 100.0, true), jobs, @@ -47,4 +47,4 @@ public void testCompareTo() { assertThat(jobInfo1.compareTo(jobInfo3), lessThan(0)); assertThat(jobInfo2.compareTo(jobInfo3), lessThan(0)); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java index 368758654cb9b..647835bf9311e 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringTestUtils.java @@ -87,7 +87,8 @@ public static MonitoringBulkDoc randomMonitoringBulkDoc(final Random random, final MonitoredSystem system, final String type) throws IOException { final String id = random.nextBoolean() ? 
RandomStrings.randomAsciiLettersOfLength(random, 5) : null; - final long timestamp = RandomNumbers.randomLongBetween(random, 0L, Long.MAX_VALUE); + // ending date is the last second of 9999, should be sufficient + final long timestamp = RandomNumbers.randomLongBetween(random, 0L, 253402300799000L); final long interval = RandomNumbers.randomLongBetween(random, 0L, Long.MAX_VALUE); return new MonitoringBulkDoc(system, type, id, timestamp, interval, source, xContentType); } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index c23ef3c8ee51c..6caefe148b28a 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -61,7 +61,7 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 1, numClientNodes = 0, transportClientRatio = 0.0, supportsDedicatedMasters = false) public class LocalExporterIntegTests extends LocalExporterIntegTestCase { - private final String indexTimeFormat = randomFrom("YY", "YYYY", "YYYY.MM", "YYYY-MM", "MM.YYYY", "MM", null); + private final String indexTimeFormat = randomFrom("yy", "yyyy", "yyyy.MM", "yyyy-MM", "MM.yyyy", "MM", null); private void stopMonitoring() { // Now disabling the monitoring service, so that no more collection are started diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java index 232034177e87b..59141d2a83aeb 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtils.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.rollup; -import org.elasticsearch.common.rounding.DateTimeUnit; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; -import org.joda.time.DateTimeZone; import java.util.ArrayList; import java.util.Comparator; @@ -98,7 +97,7 @@ private static void checkDateHisto(DateHistogramAggregationBuilder source, List< DateHistogramInterval interval = new DateHistogramInterval((String)agg.get(RollupField.INTERVAL)); String thisTimezone = (String)agg.get(DateHistogramGroupConfig.TIME_ZONE); - String sourceTimeZone = source.timeZone() == null ? DateTimeZone.UTC.toString() : source.timeZone().toString(); + String sourceTimeZone = source.timeZone() == null ? "UTC" : source.timeZone().toString(); // Ensure we are working on the same timezone if (thisTimezone.equalsIgnoreCase(sourceTimeZone) == false) { @@ -152,10 +151,10 @@ static boolean validateCalendarInterval(DateHistogramInterval requestInterval, // The request must be gte the config. 
The CALENDAR_ORDERING map values are integers representing // relative orders between the calendar units - DateTimeUnit requestUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(requestInterval.toString()); - long requestOrder = requestUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis(); - DateTimeUnit configUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(configInterval.toString()); - long configOrder = configUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis(); + Rounding.DateTimeUnit requestUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(requestInterval.toString()); + long requestOrder = requestUnit.getField().getBaseUnit().getDuration().toMillis(); + Rounding.DateTimeUnit configUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(configInterval.toString()); + long configOrder = configUnit.getField().getBaseUnit().getDuration().toMillis(); // All calendar units are multiples naturally, so we just care about gte return requestOrder >= configOrder; @@ -387,8 +386,8 @@ private static Comparator getComparator() { static long getMillisFixedOrCalendar(String value) { DateHistogramInterval interval = new DateHistogramInterval(value); if (isCalendarInterval(interval)) { - DateTimeUnit intervalUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()); - return intervalUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis(); + Rounding.DateTimeUnit intervalUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()); + return intervalUnit.getField().getBaseUnit().getDuration().toMillis(); } else { return TimeValue.parseTimeValue(value, "date_histo.comparator.interval").getMillis(); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index ee29e56a33169..1d5f9093a29df 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -28,9 +28,9 @@ import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; -import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.joda.time.DateTimeZone; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -214,7 +215,7 @@ public static List> createValueSourceBuilders(fi final DateHistogramValuesSourceBuilder dateHistogramBuilder = new DateHistogramValuesSourceBuilder(dateHistogramName); dateHistogramBuilder.dateHistogramInterval(dateHistogram.getInterval()); dateHistogramBuilder.field(dateHistogramField); - dateHistogramBuilder.timeZone(toDateTimeZone(dateHistogram.getTimeZone())); + dateHistogramBuilder.timeZone(ZoneId.of(dateHistogram.getTimeZone())); return 
Collections.singletonList(dateHistogramBuilder); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java index 95161e0d149dc..d05a78e121296 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.joda.time.DateTimeZone; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -122,14 +123,14 @@ public void testIncompatibleFixedCalendarInterval() { } public void testBadTimeZone() { - final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "EST")); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "CET")); final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") .dateHistogramInterval(new DateHistogramInterval("1h")) - .timeZone(DateTimeZone.UTC); + .timeZone(ZoneOffset.UTC); RuntimeException e = expectThrows(RuntimeException.class, () -> RollupJobIdentifierUtils.findBestJobs(builder, caps)); assertThat(e.getMessage(), equalTo("There is not a rollup job that has a [date_histogram] agg on field " + diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index 3dc91ede1bd2c..0032b5a88a563 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -147,7 +147,7 @@ public void testRangeWrongTZ() { Set caps = new HashSet<>(); caps.add(cap); Exception e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("EST"), caps)); + () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("CET"), caps)); assertThat(e.getMessage(), equalTo("Field [foo] in [range] query was found in rollup indices, but requested timezone is not " + "compatible. 
Options include: [UTC]")); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java index bd8a0b19f8250..9f8796f4c9589 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.joda.time.DateTimeZone; +import java.time.zone.ZoneRulesException; import java.util.HashMap; import java.util.Map; @@ -84,9 +85,9 @@ public void testDefaultTimeZone() { } public void testUnknownTimeZone() { - Exception e = expectThrows(IllegalArgumentException.class, + Exception e = expectThrows(ZoneRulesException.class, () -> new DateHistogramGroupConfig("foo", DateHistogramInterval.HOUR, null, "FOO")); - assertThat(e.getMessage(), equalTo("The datetime zone id 'FOO' is not recognised")); + assertThat(e.getMessage(), equalTo("Unknown time-zone ID: FOO")); } public void testEmptyHistoField() { diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index b87b1f3761fdb..cdabb36d42760 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.Rounding; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -58,12 +58,14 @@ import org.junit.Before; import java.io.IOException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; @@ -144,22 +146,22 @@ public void testDateHistoAndMetrics() throws Exception { final List> dataset = new ArrayList<>(); dataset.addAll( Arrays.asList( - asMap("the_histo", asLong("2015-03-31T03:00:00"), "counter", 10), - asMap("the_histo", asLong("2015-03-31T03:20:00"), "counter", 20), - asMap("the_histo", asLong("2015-03-31T03:40:00"), "counter", 20), - asMap("the_histo", asLong("2015-03-31T04:00:00"), "counter", 32), - asMap("the_histo", asLong("2015-03-31T04:20:00"), "counter", 54), - asMap("the_histo", asLong("2015-03-31T04:40:00"), "counter", 55), - asMap("the_histo", asLong("2015-03-31T05:00:00"), "counter", 55), - asMap("the_histo", asLong("2015-03-31T05:00:00"), "counter", 70), - asMap("the_histo", asLong("2015-03-31T05:20:00"), "counter", 70), - asMap("the_histo", asLong("2015-03-31T05:40:00"), "counter", 80), - asMap("the_histo", asLong("2015-03-31T06:00:00"), "counter", 80), - asMap("the_histo", asLong("2015-03-31T06:20:00"), "counter", 90), - asMap("the_histo", asLong("2015-03-31T06:40:00"), "counter", 100), - asMap("the_histo", asLong("2015-03-31T07:00:00"), "counter", 
120), - asMap("the_histo", asLong("2015-03-31T07:20:00"), "counter", 120), - asMap("the_histo", asLong("2015-03-31T07:40:00"), "counter", 200) + asMap("the_histo", asLong("2015-03-31T03:00:00.000Z"), "counter", 10), + asMap("the_histo", asLong("2015-03-31T03:20:00.000Z"), "counter", 20), + asMap("the_histo", asLong("2015-03-31T03:40:00.000Z"), "counter", 20), + asMap("the_histo", asLong("2015-03-31T04:00:00.000Z"), "counter", 32), + asMap("the_histo", asLong("2015-03-31T04:20:00.000Z"), "counter", 54), + asMap("the_histo", asLong("2015-03-31T04:40:00.000Z"), "counter", 55), + asMap("the_histo", asLong("2015-03-31T05:00:00.000Z"), "counter", 55), + asMap("the_histo", asLong("2015-03-31T05:00:00.000Z"), "counter", 70), + asMap("the_histo", asLong("2015-03-31T05:20:00.000Z"), "counter", 70), + asMap("the_histo", asLong("2015-03-31T05:40:00.000Z"), "counter", 80), + asMap("the_histo", asLong("2015-03-31T06:00:00.000Z"), "counter", 80), + asMap("the_histo", asLong("2015-03-31T06:20:00.000Z"), "counter", 90), + asMap("the_histo", asLong("2015-03-31T06:40:00.000Z"), "counter", 100), + asMap("the_histo", asLong("2015-03-31T07:00:00.000Z"), "counter", 120), + asMap("the_histo", asLong("2015-03-31T07:20:00.000Z"), "counter", 120), + asMap("the_histo", asLong("2015-03-31T07:40:00.000Z"), "counter", 200) ) ); executeTestCase(dataset, job, System.currentTimeMillis(), (resp) -> { @@ -170,7 +172,7 @@ public void testDateHistoAndMetrics() throws Exception { assertThat(request.sourceAsMap(), equalTo( asMap( "_rollup.version", newIDScheme ? 2 : 1, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00"), + "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"), "the_histo.date_histogram.interval", "1h", "the_histo.date_histogram._count", 3, "counter.avg._count", 3.0, @@ -188,7 +190,7 @@ public void testDateHistoAndMetrics() throws Exception { assertThat(request.sourceAsMap(), equalTo( asMap( "_rollup.version", newIDScheme ? 2 : 1, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T04:00:00"), + "the_histo.date_histogram.timestamp", asLong("2015-03-31T04:00:00.000Z"), "the_histo.date_histogram.interval", "1h", "the_histo.date_histogram._count", 3, "counter.avg._count", 3.0, @@ -206,7 +208,7 @@ public void testDateHistoAndMetrics() throws Exception { assertThat(request.sourceAsMap(), equalTo( asMap( "_rollup.version", newIDScheme ? 2 : 1, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T05:00:00"), + "the_histo.date_histogram.timestamp", asLong("2015-03-31T05:00:00.000Z"), "the_histo.date_histogram.interval", "1h", "the_histo.date_histogram._count", 4, "counter.avg._count", 4.0, @@ -224,7 +226,7 @@ public void testDateHistoAndMetrics() throws Exception { assertThat(request.sourceAsMap(), equalTo( asMap( "_rollup.version", newIDScheme ? 2 : 1, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T06:00:00"), + "the_histo.date_histogram.timestamp", asLong("2015-03-31T06:00:00.000Z"), "the_histo.date_histogram.interval", "1h", "the_histo.date_histogram._count", 3, "counter.avg._count", 3.0, @@ -242,7 +244,7 @@ public void testDateHistoAndMetrics() throws Exception { assertThat(request.sourceAsMap(), equalTo( asMap( "_rollup.version", newIDScheme ? 
2 : 1, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T07:00:00"), + "the_histo.date_histogram.timestamp", asLong("2015-03-31T07:00:00.000Z"), "the_histo.date_histogram.interval", "1h", "the_histo.date_histogram._count", 3, "counter.avg._count", 3.0, @@ -326,7 +328,7 @@ public void testSimpleDateHistoWithDelay() throws Exception { public void testSimpleDateHistoWithTimeZone() throws Exception { final List> dataset = new ArrayList<>(); - long now = asLong("2015-04-01T10:00:00"); + long now = asLong("2015-04-01T10:00:00.000Z"); dataset.addAll( Arrays.asList( asMap("the_histo", now - TimeValue.timeValueHours(10).getMillis()), @@ -353,7 +355,7 @@ public void testSimpleDateHistoWithTimeZone() throws Exception { assertThat(request.sourceAsMap(), equalTo( asMap( "_rollup.version", newIDScheme ? 2 : 1, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00"), + "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"), "the_histo.date_histogram.interval", "1d", "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", timeZone.toString(), @@ -372,7 +374,7 @@ public void testSimpleDateHistoWithTimeZone() throws Exception { assertThat(request.sourceAsMap(), equalTo( asMap( "_rollup.version", newIDScheme ? 2 : 1, - "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00"), + "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"), "the_histo.date_histogram.interval", "1d", "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", timeZone.toString(), @@ -385,7 +387,7 @@ public void testSimpleDateHistoWithTimeZone() throws Exception { assertThat(request.sourceAsMap(), equalTo( asMap( "_rollup.version", newIDScheme ? 2 : 1, - "the_histo.date_histogram.timestamp", asLong("2015-04-01T03:00:00"), + "the_histo.date_histogram.timestamp", asLong("2015-04-01T03:00:00.000Z"), "the_histo.date_histogram.interval", "1d", "the_histo.date_histogram._count", 5, "the_histo.date_histogram.time_zone", timeZone.toString(), @@ -449,7 +451,7 @@ static Map asMap(Object... 
fields) { } private static long asLong(String dateTime) { - return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis(); + return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(dateTime); } /** @@ -488,7 +490,8 @@ private void executeTestCase(List> docs, RollupJobConfig con private Map createFieldTypes(RollupJobConfig job) { Map fieldTypes = new HashMap<>(); MappedFieldType fieldType = new DateFieldMapper.Builder(job.getGroupConfig().getDateHistogram().getField()) - .dateTimeFormatter(DateFormatter.forPattern(randomFrom("basic_date", "date_optional_time", "epoch_second"))) + .format(randomFrom("basic_date", "date_optional_time", "epoch_second")) + .locale(Locale.ROOT) .build(new Mapper.BuilderContext(settings.getSettings(), new ContentPath(0))) .fieldType(); fieldTypes.put(fieldType.name(), fieldType); @@ -599,7 +602,7 @@ protected void doNextSearch(SearchRequest request, ActionListener createSourceBuilder() { return new DateHistogramValuesSourceBuilder(id()) .interval(interval) - .timeZone(DateUtils.zoneIdToDateTimeZone(zoneId)); + .timeZone(zoneId); } @Override From 701d89caa2e0e0d154b42a6afd50777a5213c3d3 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 23 Jan 2019 11:00:37 +0100 Subject: [PATCH 16/24] Mute FilterAggregatorTests#testRandom Relates #37743 --- .../search/aggregations/bucket/filter/FilterAggregatorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java index f5b5d187e4187..af5f65b9698e4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java @@ -63,6 +63,7 @@ public void testEmpty() throws Exception { directory.close(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37743") public void testRandom() throws Exception { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); From 6130d151725c202fd11dfdbd267478edefc83f56 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 23 Jan 2019 11:08:54 +0100 Subject: [PATCH 17/24] Adapt SyncedFlushService (#37691) --- .../indices/flush/SyncedFlushService.java | 15 +++++-------- .../flush/SyncedFlushSingleNodeTests.java | 22 +++++++++++++------ 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 9bc4e4ead1269..0423559aaf5a5 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -32,7 +32,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -290,16 +289,14 @@ private void reportSuccessWithExistingSyncId(ShardId shardId, listener.onResponse(new ShardsSyncedFlushResult(shardId, existingSyncId, 
totalShards, results)); } - final IndexShardRoutingTable getShardRoutingTable(ShardId shardId, ClusterState state) { - final IndexRoutingTable indexRoutingTable = state.routingTable().index(shardId.getIndexName()); - if (indexRoutingTable == null) { - IndexMetaData index = state.getMetaData().index(shardId.getIndex()); - if (index != null && index.getState() == IndexMetaData.State.CLOSE) { - throw new IndexClosedException(shardId.getIndex()); - } + final IndexShardRoutingTable getShardRoutingTable(final ShardId shardId, final ClusterState state) { + final IndexMetaData indexMetaData = state.getMetaData().index(shardId.getIndex()); + if (indexMetaData == null) { throw new IndexNotFoundException(shardId.getIndexName()); + } else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { + throw new IndexClosedException(shardId.getIndex()); } - final IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId.id()); + final IndexShardRoutingTable shardRoutingTable = state.routingTable().index(indexMetaData.getIndex()).shard(shardId.id()); if (shardRoutingTable == null) { throw new ShardNotFoundException(shardId); } diff --git a/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index b9e0bd13f3568..9d7f3d5e253cf 100644 --- a/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -20,11 +20,13 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; @@ -38,6 +40,8 @@ import java.util.Map; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { public void testModificationPreventsFlushing() throws InterruptedException { @@ -130,22 +134,26 @@ public void testSyncFailsIfOperationIsInFlight() throws InterruptedException, Ex } public void testSyncFailsOnIndexClosedOrMissing() throws InterruptedException { - createIndex("test"); + createIndex("test", Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build()); IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); - IndexShard shard = test.getShardOrNull(0); + final IndexShard shard = test.getShardOrNull(0); + assertNotNull(shard); + final ShardId shardId = shard.shardId(); + + final SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); - SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); - flushService.attemptSyncedFlush(new ShardId("test", "_na_", 1), listener); + flushService.attemptSyncedFlush(new 
ShardId(shard.shardId().getIndex(), 1), listener); listener.latch.await(); assertNotNull(listener.error); assertNull(listener.result); assertEquals(ShardNotFoundException.class, listener.error.getClass()); assertEquals("no such shard", listener.error.getMessage()); - final ShardId shardId = shard.shardId(); - - client().admin().indices().prepareClose("test").get(); + assertAcked(client().admin().indices().prepareClose("test")); listener = new SyncedFlushUtil.LatchedListener(); flushService.attemptSyncedFlush(shardId, listener); listener.latch.await(); From f54a3b5f672c5f5728a43f0976ded7f6b7733fda Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Wed, 23 Jan 2019 12:09:40 +0200 Subject: [PATCH 18/24] Don't use Groovy's `withDefault` (#37726) Closes #37061 --- .../main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index af207bcae7ca8..54a14138505cd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -392,7 +392,7 @@ class BuildPlugin implements Plugin { static void requireJavaHome(Task task, int version) { Project rootProject = task.project.rootProject // use root project for global accounting if (rootProject.hasProperty('requiredJavaVersions') == false) { - rootProject.rootProject.ext.requiredJavaVersions = [:].withDefault{key -> return []} + rootProject.rootProject.ext.requiredJavaVersions = [:] rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph -> List messages = [] for (entry in rootProject.requiredJavaVersions) { @@ -415,7 +415,7 @@ class BuildPlugin implements Plugin { throw new GradleException("JAVA${version}_HOME required to run task:\n${task}") } } else { - rootProject.requiredJavaVersions.get(version).add(task) + rootProject.requiredJavaVersions.getOrDefault(version, []).add(task) } } From 1de286bfb3fc8190ef30ad4195f82e124b7417ff Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 23 Jan 2019 12:15:00 +0100 Subject: [PATCH 19/24] Add a note how to benchmark Elasticsearch With this commit we add a note that explains when to use benchmarks and point the reader to the macrobenchmarking tool Rally for further information. Relates #37694 --- TESTING.asciidoc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 6389f8cb3038d..cf4a40713114c 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -631,3 +631,13 @@ inside `/etc/hosts`, e.g.: 255.255.255.255 broadcasthost ::1 localhost ElasticMBP.local` .... + +== Benchmarking + +For changes that might affect the performance characteristics of Elasticsearch +you should also run macrobenchmarks. We maintain a macrobenchmarking tool +called https://github.com/elastic/rally[Rally] +which you can use to measure the performance impact. It comes with a set of +default benchmarks that we also +https://elasticsearch-benchmarks.elastic.co/[run every night]. To get started, +please see https://esrally.readthedocs.io/en/stable/[Rally's documentation]. 
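
Side note on the `withDefault` removal in the build plugin patch above: Groovy's `[:].withDefault{...}` map creates and stores the default value on first access, whereas `Map.getOrDefault` only returns the fallback without inserting it, so a list obtained that way and then mutated is discarded if the key was absent. The following is a minimal, self-contained Java sketch of that semantic difference (the map and task names are illustrative stand-ins, not the build-script code); `computeIfAbsent` is the standard-library call whose behavior matches `withDefault`:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DefaultingMapDemo {
    public static void main(String[] args) {
        Map<Integer, List<String>> requiredJavaVersions = new HashMap<>();

        // getOrDefault returns the fallback list but never stores it, so the
        // added element is lost when the key is absent.
        requiredJavaVersions.getOrDefault(11, new ArrayList<>()).add("compileTask");
        System.out.println(requiredJavaVersions.containsKey(11)); // prints: false

        // computeIfAbsent inserts the freshly created list before returning it,
        // which is the behavior Groovy's withDefault map provides on first get.
        requiredJavaVersions.computeIfAbsent(11, k -> new ArrayList<>()).add("compileTask");
        System.out.println(requiredJavaVersions.get(11)); // prints: [compileTask]
    }
}
```

Whether the stored-versus-transient default matters here depends on how `requiredJavaVersions` is consumed at task-graph time; the sketch only isolates the library semantics.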
From 100537fbc320d488c5b217999f01fdb63ce1e73e Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 23 Jan 2019 12:19:54 +0100 Subject: [PATCH 20/24] Target only specific index in update settings test With this commit we limit the update settings request to the index that is used in `IndicesClientIT#testIndexPutSettings()`. This avoids spurious exceptions involving other indices that might also be present in the test cluster. Closes #36931 Relates #37338 --- .../test/java/org/elasticsearch/client/IndicesClientIT.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index af8a51b4900d8..55c20ed6d7792 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -1177,7 +1177,7 @@ public void testIndexPutSettings() throws IOException { createIndex(index, Settings.EMPTY); assertThat(dynamicSetting.getDefault(Settings.EMPTY), not(dynamicSettingValue)); - UpdateSettingsRequest dynamicSettingRequest = new UpdateSettingsRequest(); + UpdateSettingsRequest dynamicSettingRequest = new UpdateSettingsRequest(index); dynamicSettingRequest.settings(Settings.builder().put(dynamicSettingKey, dynamicSettingValue).build()); AcknowledgedResponse response = execute(dynamicSettingRequest, highLevelClient().indices()::putSettings, highLevelClient().indices()::putSettingsAsync); @@ -1187,7 +1187,7 @@ public void testIndexPutSettings() throws IOException { assertThat(indexSettingsAsMap.get(dynamicSettingKey), equalTo(String.valueOf(dynamicSettingValue))); assertThat(staticSetting.getDefault(Settings.EMPTY), not(staticSettingValue)); - UpdateSettingsRequest staticSettingRequest = new UpdateSettingsRequest(); + UpdateSettingsRequest staticSettingRequest = new UpdateSettingsRequest(index); staticSettingRequest.settings(Settings.builder().put(staticSettingKey, staticSettingValue).build()); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> execute(staticSettingRequest, highLevelClient().indices()::putSettings, highLevelClient().indices()::putSettingsAsync)); @@ -1207,7 +1207,7 @@ public void testIndexPutSettings() throws IOException { assertThat(indexSettingsAsMap.get(staticSettingKey), equalTo(staticSettingValue)); assertThat(unmodifiableSetting.getDefault(Settings.EMPTY), not(unmodifiableSettingValue)); - UpdateSettingsRequest unmodifiableSettingRequest = new UpdateSettingsRequest(); + UpdateSettingsRequest unmodifiableSettingRequest = new UpdateSettingsRequest(index); unmodifiableSettingRequest.settings(Settings.builder().put(unmodifiableSettingKey, unmodifiableSettingValue).build()); exception = expectThrows(ElasticsearchException.class, () -> execute(unmodifiableSettingRequest, highLevelClient().indices()::putSettings, highLevelClient().indices()::putSettingsAsync)); From 4ec3a6d922f302ed65b322506de8cdac3b8e1405 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 23 Jan 2019 12:38:44 +0100 Subject: [PATCH 21/24] Ensure either success or failure path for SearchOperationListener is called (#37467) Today we have several implementations of executing SearchOperationListener in SearchService. 
While all of them seem to be safe at least on, the one that executes scroll searches can cause illegal execution of SearchOperationListener that can then in-turn trigger assertions in ShardSearchStats. This change adds a SearchOperationListenerExecutor that uses try-with blocks to ensure listeners are called in a safe way. Relates to #37185 --- .../elasticsearch/search/SearchService.java | 171 ++++++++++-------- 1 file changed, 95 insertions(+), 76 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index ddec3637ed491..5e2758eb5b83c 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchTask; @@ -329,7 +328,7 @@ private DfsSearchResult executeDfsPhase(ShardSearchRequest request, SearchTask t } catch (Exception e) { logger.trace("Dfs phase failed", e); processFailure(context, e); - throw ExceptionsHelper.convertToRuntime(e); + throw e; } finally { cleanContext(context); } @@ -380,29 +379,24 @@ protected void doRun() { }); } - private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchTask task) throws IOException { + private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchTask task) throws Exception { final SearchContext context = createAndPutContext(request); - final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); context.incRef(); - boolean queryPhaseSuccess = false; try { context.setTask(task); - operationListener.onPreQueryPhase(context); - long time = System.nanoTime(); - contextProcessing(context); - - loadOrExecuteQueryPhase(request, context); - - if (context.queryResult().hasSearchContext() == false && context.scrollContext() == null) { - freeContext(context.id()); - } else { - contextProcessedSuccessfully(context); + final long afterQueryTime; + try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)) { + contextProcessing(context); + loadOrExecuteQueryPhase(request, context); + if (context.queryResult().hasSearchContext() == false && context.scrollContext() == null) { + freeContext(context.id()); + } else { + contextProcessedSuccessfully(context); + } + afterQueryTime = executor.success(); } - final long afterQueryTime = System.nanoTime(); - queryPhaseSuccess = true; - operationListener.onQueryPhase(context, afterQueryTime - time); if (request.numberOfShards() == 1) { - return executeFetchPhase(context, operationListener, afterQueryTime); + return executeFetchPhase(context, afterQueryTime); } return context.queryResult(); } catch (Exception e) { @@ -411,21 +405,16 @@ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchTa e = (e.getCause() == null || e.getCause() instanceof Exception) ? 
(Exception) e.getCause() : new ElasticsearchException(e.getCause()); } - if (!queryPhaseSuccess) { - operationListener.onFailedQueryPhase(context); - } logger.trace("Query phase failed", e); processFailure(context, e); - throw ExceptionsHelper.convertToRuntime(e); + throw e; } finally { cleanContext(context); } } - private QueryFetchSearchResult executeFetchPhase(SearchContext context, SearchOperationListener operationListener, - long afterQueryTime) { - operationListener.onPreFetchPhase(context); - try { + private QueryFetchSearchResult executeFetchPhase(SearchContext context, long afterQueryTime) { + try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, afterQueryTime)){ shortcutDocIdsToLoad(context); fetchPhase.execute(context); if (fetchPhaseShouldFreeContext(context)) { @@ -433,34 +422,27 @@ private QueryFetchSearchResult executeFetchPhase(SearchContext context, SearchOp } else { contextProcessedSuccessfully(context); } - } catch (Exception e) { - operationListener.onFailedFetchPhase(context); - throw ExceptionsHelper.convertToRuntime(e); + executor.success(); } - operationListener.onFetchPhase(context, System.nanoTime() - afterQueryTime); return new QueryFetchSearchResult(context.queryResult(), context.fetchResult()); } public void executeQueryPhase(InternalScrollSearchRequest request, SearchTask task, ActionListener listener) { runAsync(request.id(), () -> { final SearchContext context = findContext(request.id(), request); - SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); context.incRef(); - try { + try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)) { context.setTask(task); - operationListener.onPreQueryPhase(context); - long time = System.nanoTime(); contextProcessing(context); processScroll(request, context); queryPhase.execute(context); contextProcessedSuccessfully(context); - operationListener.onQueryPhase(context, System.nanoTime() - time); + executor.success(); return new ScrollQuerySearchResult(context.queryResult(), context.shardTarget()); } catch (Exception e) { - operationListener.onFailedQueryPhase(context); logger.trace("Query phase failed", e); processFailure(context, e); - throw ExceptionsHelper.convertToRuntime(e); + throw e; } finally { cleanContext(context); } @@ -471,15 +453,10 @@ public void executeQueryPhase(QuerySearchRequest request, SearchTask task, Actio runAsync(request.id(), () -> { final SearchContext context = findContext(request.id(), request); context.setTask(task); - IndexShard indexShard = context.indexShard(); - SearchOperationListener operationListener = indexShard.getSearchOperationListener(); context.incRef(); - try { + try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)) { contextProcessing(context); context.searcher().setAggregatedDfs(request.dfs()); - - operationListener.onPreQueryPhase(context); - long time = System.nanoTime(); queryPhase.execute(context); if (context.queryResult().hasSearchContext() == false && context.scrollContext() == null) { // no hits, we can release the context since there will be no fetch phase @@ -487,13 +464,12 @@ public void executeQueryPhase(QuerySearchRequest request, SearchTask task, Actio } else { contextProcessedSuccessfully(context); } - operationListener.onQueryPhase(context, System.nanoTime() - time); + executor.success(); return context.queryResult(); } catch (Exception e) { - operationListener.onFailedQueryPhase(context); 
logger.trace("Query phase failed", e); processFailure(context, e); - throw ExceptionsHelper.convertToRuntime(e); + throw e; } finally { cleanContext(context); } @@ -527,28 +503,19 @@ public void executeFetchPhase(InternalScrollSearchRequest request, SearchTask ta ActionListener listener) { runAsync(request.id(), () -> { final SearchContext context = findContext(request.id(), request); + context.setTask(task); context.incRef(); - try { - context.setTask(task); + try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)){ contextProcessing(context); - SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); processScroll(request, context); - operationListener.onPreQueryPhase(context); - final long time = System.nanoTime(); - try { - queryPhase.execute(context); - } catch (Exception e) { - operationListener.onFailedQueryPhase(context); - throw ExceptionsHelper.convertToRuntime(e); - } - long afterQueryTime = System.nanoTime(); - operationListener.onQueryPhase(context, afterQueryTime - time); - QueryFetchSearchResult fetchSearchResult = executeFetchPhase(context, operationListener, afterQueryTime); + queryPhase.execute(context); + final long afterQueryTime = executor.success(); + QueryFetchSearchResult fetchSearchResult = executeFetchPhase(context, afterQueryTime); return new ScrollQueryFetchSearchResult(fetchSearchResult, context.shardTarget()); } catch (Exception e) { logger.trace("Fetch phase failed", e); processFailure(context, e); - throw ExceptionsHelper.convertToRuntime(e); + throw e; } finally { cleanContext(context); } @@ -558,7 +525,6 @@ public void executeFetchPhase(InternalScrollSearchRequest request, SearchTask ta public void executeFetchPhase(ShardFetchRequest request, SearchTask task, ActionListener listener) { runAsync(request.id(), () -> { final SearchContext context = findContext(request.id(), request); - final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); context.incRef(); try { context.setTask(task); @@ -567,21 +533,20 @@ public void executeFetchPhase(ShardFetchRequest request, SearchTask task, Action context.scrollContext().lastEmittedDoc = request.lastEmittedDoc(); } context.docIdsToLoad(request.docIds(), 0, request.docIdsSize()); - operationListener.onPreFetchPhase(context); - long time = System.nanoTime(); - fetchPhase.execute(context); - if (fetchPhaseShouldFreeContext(context)) { - freeContext(request.id()); - } else { - contextProcessedSuccessfully(context); + try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, System.nanoTime())) { + fetchPhase.execute(context); + if (fetchPhaseShouldFreeContext(context)) { + freeContext(request.id()); + } else { + contextProcessedSuccessfully(context); + } + executor.success(); } - operationListener.onFetchPhase(context, System.nanoTime() - time); return context.fetchResult(); } catch (Exception e) { - operationListener.onFailedFetchPhase(context); logger.trace("Fetch phase failed", e); processFailure(context, e); - throw ExceptionsHelper.convertToRuntime(e); + throw e; } finally { cleanContext(context); } @@ -661,7 +626,7 @@ final SearchContext createContext(ShardSearchRequest request) throws IOException context.lowLevelCancellation(lowLevelCancellation); } catch (Exception e) { context.close(); - throw ExceptionsHelper.convertToRuntime(e); + throw e; } return context; @@ -733,7 +698,7 @@ public void freeAllScrollContexts() { } } - private void 
contextScrollKeepAlive(SearchContext context, long keepAlive) throws IOException { + private void contextScrollKeepAlive(SearchContext context, long keepAlive) { if (keepAlive > maxKeepAlive) { throw new IllegalArgumentException( "Keep alive for scroll (" + TimeValue.timeValueMillis(keepAlive) + ") is too large. " + @@ -991,7 +956,7 @@ private void shortcutDocIdsToLoad(SearchContext context) { context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); } - private void processScroll(InternalScrollSearchRequest request, SearchContext context) throws IOException { + private void processScroll(InternalScrollSearchRequest request, SearchContext context) { // process scroll context.from(context.from() + context.size()); context.scrollContext().scroll = request.scroll(); @@ -1147,4 +1112,58 @@ public boolean canMatch() { return canMatch; } } + + /** + * This helper class ensures we only execute either the success or the failure path for {@link SearchOperationListener}. + * This is crucial for some implementations like {@link org.elasticsearch.index.search.stats.ShardSearchStats}. + */ + private static final class SearchOperationListenerExecutor implements AutoCloseable { + private final SearchOperationListener listener; + private final SearchContext context; + private final long time; + private final boolean fetch; + private long afterQueryTime = -1; + private boolean closed = false; + + SearchOperationListenerExecutor(SearchContext context) { + this(context, false, System.nanoTime()); + } + + SearchOperationListenerExecutor(SearchContext context, boolean fetch, long startTime) { + this.listener = context.indexShard().getSearchOperationListener(); + this.context = context; + time = startTime; + this.fetch = fetch; + if (fetch) { + listener.onPreFetchPhase(context); + } else { + listener.onPreQueryPhase(context); + } + } + + long success() { + return afterQueryTime = System.nanoTime(); + } + + @Override + public void close() { + assert closed == false : "already closed - while technically OK, double closing is likely a bug in this case"; + if (closed == false) { + closed = true; + if (afterQueryTime != -1) { + if (fetch) { + listener.onFetchPhase(context, afterQueryTime - time); + } else { + listener.onQueryPhase(context, afterQueryTime - time); + } + } else { + if (fetch) { + listener.onFailedFetchPhase(context); + } else { + listener.onFailedQueryPhase(context); + } + } + } + } + } } From d5139e0590ed044c0950cfede5bac537a3fba2b3 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 23 Jan 2019 13:10:11 +0100 Subject: [PATCH 22/24] Only bootstrap and elect node in current voting configuration (#37712) Adapts bootstrapping and leader election to trigger only on nodes that are actually part of the voting configuration.
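The guard at the heart of this change is a simple membership test: before bootstrapping, pre-voting, or starting an election, a node checks that its own id appears in the last committed or last accepted voting configuration. A condensed sketch of that check (the method mirrors the Coordinator change in the diff below; the surrounding class context is elided):

    // Sketch: a node may only take part in an election if it is itself part of
    // the election quorum, i.e. its id appears in the last committed or last
    // accepted voting configuration of the last accepted cluster state.
    private static boolean electionQuorumContainsLocalNode(ClusterState lastAcceptedState) {
        final String localNodeId = lastAcceptedState.nodes().getLocalNodeId();
        assert localNodeId != null;
        return lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(localNodeId)
            || lastAcceptedState.getLastAcceptedConfiguration().getNodeIds().contains(localNodeId);
    }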
--- .../coordination/ClusterBootstrapService.java | 9 +++- .../cluster/coordination/Coordinator.java | 39 +++++++++++++++++- .../ClusterBootstrapServiceTests.java | 12 ++++++ .../coordination/CoordinatorTests.java | 41 +++++++++++++------ 4 files changed, 87 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterBootstrapService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterBootstrapService.java index d21c54c03e4e5..cdbf6b6691077 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterBootstrapService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterBootstrapService.java @@ -97,7 +97,7 @@ public static boolean discoveryIsConfigured(Settings settings) { void onFoundPeersUpdated() { final Set nodes = getDiscoveredNodes(); - if (transportService.getLocalNode().isMasterNode() && bootstrapRequirements.isEmpty() == false + if (bootstrappingPermitted.get() && transportService.getLocalNode().isMasterNode() && bootstrapRequirements.isEmpty() == false && isBootstrappedSupplier.getAsBoolean() == false && nodes.stream().noneMatch(Coordinator::isZen1Node)) { final Tuple,List> requirementMatchingResult; @@ -114,6 +114,13 @@ void onFoundPeersUpdated() { logger.trace("nodesMatchingRequirements={}, unsatisfiedRequirements={}, bootstrapRequirements={}", nodesMatchingRequirements, unsatisfiedRequirements, bootstrapRequirements); + if (nodesMatchingRequirements.contains(transportService.getLocalNode()) == false) { + logger.info("skipping cluster bootstrapping as local node does not match bootstrap requirements: {}", + bootstrapRequirements); + bootstrappingPermitted.set(false); + return; + } + if (nodesMatchingRequirements.size() * 2 > bootstrapRequirements.size()) { startBootstrap(nodesMatchingRequirements, unsatisfiedRequirements); } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 084d5cf38f2db..4a018c1f78f91 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -348,6 +348,12 @@ private void startElection() { // The preVoteCollector is only active while we are candidate, but it does not call this method with synchronisation, so we have // to check our mode again here. 
if (mode == Mode.CANDIDATE) { + if (electionQuorumContainsLocalNode(getLastAcceptedState()) == false) { + logger.trace("skip election as local node is not part of election quorum: {}", + getLastAcceptedState().coordinationMetaData()); + return; + } + final StartJoinRequest startJoinRequest = new StartJoinRequest(getLocalNode(), Math.max(getCurrentTerm(), maxTermSeen) + 1); logger.debug("starting election with {}", startJoinRequest); @@ -360,6 +366,13 @@ private void startElection() { } } + private static boolean electionQuorumContainsLocalNode(ClusterState lastAcceptedState) { + final String localNodeId = lastAcceptedState.nodes().getLocalNodeId(); + assert localNodeId != null; + return lastAcceptedState.getLastCommittedConfiguration().getNodeIds().contains(localNodeId) + || lastAcceptedState.getLastAcceptedConfiguration().getNodeIds().contains(localNodeId); + } + private Optional ensureTermAtLeast(DiscoveryNode sourceNode, long targetTerm) { assert Thread.holdsLock(mutex) : "Coordinator mutex not held"; if (getCurrentTerm() < targetTerm) { @@ -709,10 +722,24 @@ public boolean setInitialConfiguration(final VotingConfiguration votingConfigura return false; } + if (getLocalNode().isMasterNode() == false) { + logger.debug("skip setting initial configuration as local node is not a master-eligible node"); + throw new CoordinationStateRejectedException( + "this node is not master-eligible, but cluster bootstrapping can only happen on a master-eligible node"); + } + + if (votingConfiguration.getNodeIds().contains(getLocalNode().getId()) == false) { + logger.debug("skip setting initial configuration as local node is not part of initial configuration"); + throw new CoordinationStateRejectedException("local node is not part of initial configuration"); + } + final List knownNodes = new ArrayList<>(); knownNodes.add(getLocalNode()); peerFinder.getFoundPeers().forEach(knownNodes::add); + if (votingConfiguration.hasQuorum(knownNodes.stream().map(DiscoveryNode::getId).collect(Collectors.toList())) == false) { + logger.debug("skip setting initial configuration as not enough nodes discovered to form a quorum in the " + + "initial configuration [knownNodes={}, {}]", knownNodes, votingConfiguration); throw new CoordinationStateRejectedException("not enough nodes discovered to form a quorum in the initial configuration " + "[knownNodes=" + knownNodes + ", " + votingConfiguration + "]"); } @@ -729,6 +756,8 @@ public boolean setInitialConfiguration(final VotingConfiguration votingConfigura metaDataBuilder.coordinationMetaData(coordinationMetaData); coordinationState.get().setInitialState(ClusterState.builder(currentState).metaData(metaDataBuilder).build()); + assert electionQuorumContainsLocalNode(getLastAcceptedState()) : + "initial state does not have local node in its election quorum: " + getLastAcceptedState().coordinationMetaData(); preVoteCollector.update(getPreVoteResponse(), null); // pick up the change to last-accepted version startElectionScheduler(); return true; @@ -1022,12 +1051,20 @@ private void startElectionScheduler() { public void run() { synchronized (mutex) { if (mode == Mode.CANDIDATE) { + final ClusterState lastAcceptedState = coordinationState.get().getLastAcceptedState(); + + if (electionQuorumContainsLocalNode(lastAcceptedState) == false) { + logger.trace("skip prevoting as local node is not part of election quorum: {}", + lastAcceptedState.coordinationMetaData()); + return; + } + if (prevotingRound != null) { prevotingRound.close(); } - final ClusterState lastAcceptedState = 
coordinationState.get().getLastAcceptedState(); final List discoveredNodes = getDiscoveredNodes().stream().filter(n -> isZen1Node(n) == false).collect(Collectors.toList()); + prevotingRound = preVoteCollector.start(lastAcceptedState, discoveredNodes); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterBootstrapServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterBootstrapServiceTests.java index 46a43afa53897..c9ebdf278c71d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterBootstrapServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterBootstrapServiceTests.java @@ -328,6 +328,18 @@ public void testDoesNotBootstrapsOnNonMasterNode() { deterministicTaskQueue.runAllTasks(); } + public void testDoesNotBootstrapsIfLocalNodeNotInInitialMasterNodes() { + ClusterBootstrapService clusterBootstrapService = new ClusterBootstrapService(Settings.builder().putList( + INITIAL_MASTER_NODES_SETTING.getKey(), otherNode1.getName(), otherNode2.getName()).build(), + transportService, () -> + Stream.of(localNode, otherNode1, otherNode2).collect(Collectors.toList()), () -> false, vc -> { + throw new AssertionError("should not be called"); + }); + transportService.start(); + clusterBootstrapService.onFoundPeersUpdated(); + deterministicTaskQueue.runAllTasks(); + } + public void testDoesNotBootstrapsIfNotConfigured() { ClusterBootstrapService clusterBootstrapService = new ClusterBootstrapService( Settings.builder().putList(INITIAL_MASTER_NODES_SETTING.getKey()).build(), transportService, diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index a9ca7d917b9d8..7db63ab120e91 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.coordination; import com.carrotsearch.randomizedtesting.RandomizedContext; - import org.apache.logging.log4j.CloseableThreadContext; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -53,6 +52,7 @@ import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.discovery.zen.PublishClusterStateStats; import org.elasticsearch.discovery.zen.UnicastHostsProvider.HostsResolver; import org.elasticsearch.env.NodeEnvironment; @@ -93,10 +93,10 @@ import static org.elasticsearch.cluster.coordination.CoordinationStateTests.clusterState; import static org.elasticsearch.cluster.coordination.CoordinationStateTests.setValue; import static org.elasticsearch.cluster.coordination.CoordinationStateTests.value; -import static org.elasticsearch.cluster.coordination.Coordinator.PUBLISH_TIMEOUT_SETTING; import static org.elasticsearch.cluster.coordination.Coordinator.Mode.CANDIDATE; import static org.elasticsearch.cluster.coordination.Coordinator.Mode.FOLLOWER; import static org.elasticsearch.cluster.coordination.Coordinator.Mode.LEADER; +import static org.elasticsearch.cluster.coordination.Coordinator.PUBLISH_TIMEOUT_SETTING; import static org.elasticsearch.cluster.coordination.CoordinatorTests.Cluster.DEFAULT_DELAY_VARIABILITY; import static 
org.elasticsearch.cluster.coordination.ElectionSchedulerFactory.ELECTION_BACK_OFF_TIME_SETTING; import static org.elasticsearch.cluster.coordination.ElectionSchedulerFactory.ELECTION_DURATION_SETTING; @@ -117,7 +117,6 @@ import static org.elasticsearch.transport.TransportService.NOOP_TRANSPORT_INTERCEPTOR; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -745,7 +744,7 @@ public void testSettingInitialConfigurationTriggersElection() { assertThat(nodeId + " should have found all peers", foundPeers, hasSize(cluster.size())); } - final ClusterNode bootstrapNode = cluster.getAnyNode(); + final ClusterNode bootstrapNode = cluster.getAnyBootstrappableNode(); bootstrapNode.applyInitialConfiguration(); assertTrue(bootstrapNode.getId() + " has been bootstrapped", bootstrapNode.coordinator.isInitialConfigurationSet()); @@ -775,13 +774,13 @@ public void testCannotSetInitialConfigurationTwice() { public void testCannotSetInitialConfigurationWithoutQuorum() { final Cluster cluster = new Cluster(randomIntBetween(1, 5)); final Coordinator coordinator = cluster.getAnyNode().coordinator; - final VotingConfiguration unknownNodeConfiguration = new VotingConfiguration(Collections.singleton("unknown-node")); + final VotingConfiguration unknownNodeConfiguration = new VotingConfiguration( + Sets.newHashSet(coordinator.getLocalNode().getId(), "unknown-node")); final String exceptionMessage = expectThrows(CoordinationStateRejectedException.class, () -> coordinator.setInitialConfiguration(unknownNodeConfiguration)).getMessage(); assertThat(exceptionMessage, startsWith("not enough nodes discovered to form a quorum in the initial configuration [knownNodes=[")); - assertThat(exceptionMessage, - endsWith("], VotingConfiguration{unknown-node}]")); + assertThat(exceptionMessage, containsString("unknown-node")); assertThat(exceptionMessage, containsString(coordinator.getLocalNode().toString())); // This is VERY BAD: setting a _different_ initial configuration. Yet it works if the first attempt will never be a quorum. 
@@ -789,6 +788,16 @@ public void testCannotSetInitialConfigurationWithoutQuorum() { cluster.stabilise(); } + public void testCannotSetInitialConfigurationWithoutLocalNode() { + final Cluster cluster = new Cluster(randomIntBetween(1, 5)); + final Coordinator coordinator = cluster.getAnyNode().coordinator; + final VotingConfiguration unknownNodeConfiguration = new VotingConfiguration(Sets.newHashSet("unknown-node")); + final String exceptionMessage = expectThrows(CoordinationStateRejectedException.class, + () -> coordinator.setInitialConfiguration(unknownNodeConfiguration)).getMessage(); + assertThat(exceptionMessage, + equalTo("local node is not part of initial configuration")); + } + public void testDiffBasedPublishing() { final Cluster cluster = new Cluster(randomIntBetween(1, 5)); cluster.runRandomly(); @@ -1331,7 +1340,7 @@ void bootstrapIfNecessary() { assertThat("setting initial configuration may fail with disconnected nodes", disconnectedNodes, empty()); assertThat("setting initial configuration may fail with blackholed nodes", blackholedNodes, empty()); runFor(defaultMillis(DISCOVERY_FIND_PEERS_INTERVAL_SETTING) * 2, "discovery prior to setting initial configuration"); - final ClusterNode bootstrapNode = getAnyMasterEligibleNode(); + final ClusterNode bootstrapNode = getAnyBootstrappableNode(); bootstrapNode.applyInitialConfiguration(); } else { logger.info("setting initial configuration not required"); @@ -1402,8 +1411,10 @@ boolean nodeExists(DiscoveryNode node) { return clusterNodes.stream().anyMatch(cn -> cn.getLocalNode().equals(node)); } - ClusterNode getAnyMasterEligibleNode() { - return randomFrom(clusterNodes.stream().filter(n -> n.getLocalNode().isMasterNode()).collect(Collectors.toList())); + ClusterNode getAnyBootstrappableNode() { + return randomFrom(clusterNodes.stream().filter(n -> n.getLocalNode().isMasterNode()) + .filter(n -> initialConfiguration.getNodeIds().contains(n.getLocalNode().getId())) + .collect(Collectors.toList())); } ClusterNode getAnyNode() { @@ -1737,8 +1748,14 @@ void applyInitialConfiguration() { Stream.generate(() -> BOOTSTRAP_PLACEHOLDER_PREFIX + UUIDs.randomBase64UUID(random())) .limit((Math.max(initialConfiguration.getNodeIds().size(), 2) - 1) / 2) .forEach(nodeIdsWithPlaceholders::add); - final VotingConfiguration configurationWithPlaceholders = new VotingConfiguration(new HashSet<>( - randomSubsetOf(initialConfiguration.getNodeIds().size(), nodeIdsWithPlaceholders))); + final Set<String> nodeIds = new HashSet<>( + randomSubsetOf(initialConfiguration.getNodeIds().size(), nodeIdsWithPlaceholders)); + // the initial configuration should not have a placeholder for the local node + if (initialConfiguration.getNodeIds().contains(localNode.getId()) && nodeIds.contains(localNode.getId()) == false) { + nodeIds.remove(nodeIds.iterator().next()); + nodeIds.add(localNode.getId()); + } + final VotingConfiguration configurationWithPlaceholders = new VotingConfiguration(nodeIds); try { coordinator.setInitialConfiguration(configurationWithPlaceholders); logger.info("successfully set initial configuration to {}", configurationWithPlaceholders); From 12f5b02fd0364de99b5d2d035cc7ffb579b43671 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 23 Jan 2019 13:53:37 +0100 Subject: [PATCH 23/24] Streamline skip_unavailable handling (#37672) This commit moves the collectSearchShards method out of RemoteClusterService into TransportSearchAction, which currently calls it.
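As part of the move, collectSearchShards talks to each remote cluster through the remote cluster client and folds per-cluster failures into either a skipped-cluster counter or an accumulated exception, depending on each cluster's skip_unavailable setting. A condensed sketch of that failure path (mirroring the TransportSearchAction change in this patch; request construction and the success path are elided):

    // Sketch: on failure, a cluster marked skip_unavailable is counted as skipped;
    // otherwise the exception is recorded (suppressing any earlier one). Either way
    // the shared CountDown decides when the overall listener completes.
    @Override
    public void onFailure(Exception e) {
        if (skipUnavailable) {
            skippedClusters.incrementAndGet();
        } else {
            RemoteTransportException exception =
                new RemoteTransportException("error while communicating with remote cluster [" + clusterAlias + "]", e);
            if (transportException.compareAndSet(null, exception) == false) {
                transportException.accumulateAndGet(exception, (previous, current) -> {
                    current.addSuppressed(previous);
                    return current;
                });
            }
        }
        maybeFinish();
    }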
RemoteClusterService used to be used only for cross-cluster search but is now also used in cross-cluster replication, where different APIs are called through the RemoteClusterAwareClient. There is no reason for the collectSearchShards and fetchShards methods to live in RemoteClusterService and RemoteClusterConnection, respectively. The search shards API can be called through the RemoteClusterAwareClient too; the only missing bit is a way to handle failures based on the skip_unavailable setting for each cluster (previously supported only in RemoteClusterConnection#fetchShards), which is achieved by adding an isSkipUnavailable(String clusterAlias) method to RemoteClusterService. This change is useful for #32125, as we will very soon need to call the search API against remote clusters as well, which will be done through RemoteClusterAwareClient. In that case we will also need to support skip_unavailable when calling the search API, so we need some way to handle the skip_unavailable setting just as we currently do for the search_shards call. Relates to #32125 --- .../shards/ClusterSearchShardsResponse.java | 4 - .../action/search/TransportSearchAction.java | 95 ++++++-- .../transport/RemoteClusterConnection.java | 62 +----- .../transport/RemoteClusterService.java | 65 +----- .../search/TransportSearchActionTests.java | 210 +++++++++++++++--- .../RemoteClusterAwareClientTests.java | 140 ++++++++++++ .../RemoteClusterConnectionTests.java | 203 +---------------- .../transport/RemoteClusterServiceTests.java | 209 +++-------------- 8 files changed, 445 insertions(+), 543 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java index 57407bd61fb82..c8889c86c1df7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java @@ -29,15 +29,11 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.Map; public class ClusterSearchShardsResponse extends ActionResponse implements ToXContentObject { - public static final ClusterSearchShardsResponse EMPTY = new ClusterSearchShardsResponse(new ClusterSearchShardsGroup[0], - new DiscoveryNode[0], Collections.emptyMap()); - private final ClusterSearchShardsGroup[] groups; private final DiscoveryNode[] nodes; private final Map<String, AliasFilter> indicesAndFilters; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 3f03c521df52a..30e030eca7376 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -22,10 +22,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; +import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; import org.elasticsearch.action.support.ActionFilters; import
org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -39,6 +41,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.shard.ShardId; @@ -50,6 +53,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; @@ -60,8 +64,11 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; @@ -195,17 +202,23 @@ protected void doExecute(Task task, SearchRequest searchRequest, ActionListener< executeSearch((SearchTask)task, timeProvider, searchRequest, localIndices, Collections.emptyList(), (clusterName, nodeId) -> null, clusterState, Collections.emptyMap(), listener, SearchResponse.Clusters.EMPTY); } else { - remoteClusterService.collectSearchShards(searchRequest.indicesOptions(), searchRequest.preference(), - searchRequest.routing(), remoteClusterIndices, ActionListener.wrap((searchShardsResponses) -> { - List remoteShardIterators = new ArrayList<>(); - Map remoteAliasFilters = new HashMap<>(); - BiFunction clusterNodeLookup = processRemoteShards(searchShardsResponses, - remoteClusterIndices, remoteShardIterators, remoteAliasFilters); - SearchResponse.Clusters clusters = buildClusters(localIndices, remoteClusterIndices, searchShardsResponses); - executeSearch((SearchTask) task, timeProvider, searchRequest, localIndices, - remoteShardIterators, clusterNodeLookup, clusterState, remoteAliasFilters, listener, - clusters); - }, listener::onFailure)); + AtomicInteger skippedClusters = new AtomicInteger(0); + collectSearchShards(searchRequest.indicesOptions(), searchRequest.preference(), searchRequest.routing(), skippedClusters, + remoteClusterIndices, remoteClusterService, threadPool, + ActionListener.wrap( + searchShardsResponses -> { + List remoteShardIterators = new ArrayList<>(); + Map remoteAliasFilters = new HashMap<>(); + BiFunction clusterNodeLookup = processRemoteShards( + searchShardsResponses, remoteClusterIndices, remoteShardIterators, remoteAliasFilters); + int localClusters = localIndices == null ? 
0 : 1; + int totalClusters = remoteClusterIndices.size() + localClusters; + int successfulClusters = searchShardsResponses.size() + localClusters; + executeSearch((SearchTask) task, timeProvider, searchRequest, localIndices, + remoteShardIterators, clusterNodeLookup, clusterState, remoteAliasFilters, listener, + new SearchResponse.Clusters(totalClusters, successfulClusters, skippedClusters.get())); + }, + listener::onFailure)); } }, listener::onFailure); if (searchRequest.source() == null) { @@ -216,18 +229,56 @@ protected void doExecute(Task task, SearchRequest searchRequest, ActionListener< } } - static SearchResponse.Clusters buildClusters(OriginalIndices localIndices, Map remoteIndices, - Map searchShardsResponses) { - int localClusters = localIndices == null ? 0 : 1; - int totalClusters = remoteIndices.size() + localClusters; - int successfulClusters = localClusters; - for (ClusterSearchShardsResponse searchShardsResponse : searchShardsResponses.values()) { - if (searchShardsResponse != ClusterSearchShardsResponse.EMPTY) { - successfulClusters++; - } + static void collectSearchShards(IndicesOptions indicesOptions, String preference, String routing, AtomicInteger skippedClusters, + Map remoteIndicesByCluster, RemoteClusterService remoteClusterService, + ThreadPool threadPool, ActionListener> listener) { + final CountDown responsesCountDown = new CountDown(remoteIndicesByCluster.size()); + final Map searchShardsResponses = new ConcurrentHashMap<>(); + final AtomicReference transportException = new AtomicReference<>(); + for (Map.Entry entry : remoteIndicesByCluster.entrySet()) { + final String clusterAlias = entry.getKey(); + boolean skipUnavailable = remoteClusterService.isSkipUnavailable(clusterAlias); + Client clusterClient = remoteClusterService.getRemoteClusterClient(threadPool, clusterAlias); + final String[] indices = entry.getValue().indices(); + ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest(indices) + .indicesOptions(indicesOptions).local(true).preference(preference).routing(routing); + clusterClient.admin().cluster().searchShards(searchShardsRequest, new ActionListener() { + @Override + public void onResponse(ClusterSearchShardsResponse response) { + searchShardsResponses.put(clusterAlias, response); + maybeFinish(); + } + + @Override + public void onFailure(Exception e) { + if (skipUnavailable) { + skippedClusters.incrementAndGet(); + } else { + RemoteTransportException exception = + new RemoteTransportException("error while communicating with remote cluster [" + clusterAlias + "]", e); + if (transportException.compareAndSet(null, exception) == false) { + transportException.accumulateAndGet(exception, (previous, current) -> { + current.addSuppressed(previous); + return current; + }); + } + } + maybeFinish(); + } + + private void maybeFinish() { + if (responsesCountDown.countDown()) { + RemoteTransportException exception = transportException.get(); + if (exception == null) { + listener.onResponse(searchShardsResponses); + } else { + listener.onFailure(transportException.get()); + } + } + } + } + ); } - int skippedClusters = totalClusters - successfulClusters; - return new SearchResponse.Clusters(totalClusters, successfulClusters, skippedClusters); } static BiFunction processRemoteShards(Map searchShardsResponses, diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 7ea55925262ff..d7e3de92e4028 100644 --- 
a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -25,9 +25,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -62,7 +59,6 @@ import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; @@ -172,6 +168,13 @@ void updateSkipUnavailable(boolean skipUnavailable) { this.skipUnavailable = skipUnavailable; } + /** + * Returns whether this cluster is configured to be skipped when unavailable + */ + boolean isSkipUnavailable() { + return skipUnavailable; + } + @Override public void onNodeDisconnected(DiscoveryNode node) { boolean remove = connectedNodes.remove(node); @@ -181,31 +184,11 @@ public void onNodeDisconnected(DiscoveryNode node) { } } - /** - * Fetches all shards for the search request from this remote connection. This is used to later run the search on the remote end. - */ - public void fetchSearchShards(ClusterSearchShardsRequest searchRequest, - ActionListener listener) { - - final ActionListener searchShardsListener; - final Consumer onConnectFailure; - if (skipUnavailable) { - onConnectFailure = (exception) -> listener.onResponse(ClusterSearchShardsResponse.EMPTY); - searchShardsListener = ActionListener.wrap(listener::onResponse, (e) -> listener.onResponse(ClusterSearchShardsResponse.EMPTY)); - } else { - onConnectFailure = listener::onFailure; - searchShardsListener = listener; - } - // in case we have no connected nodes we try to connect and if we fail we either notify the listener or not depending on - // the skip_unavailable setting - ensureConnected(ActionListener.wrap((x) -> fetchShardsInternal(searchRequest, searchShardsListener), onConnectFailure)); - } - /** * Ensures that this cluster is connected. If the cluster is connected this operation * will invoke the listener immediately. 
*/ - public void ensureConnected(ActionListener voidActionListener) { + void ensureConnected(ActionListener voidActionListener) { if (connectedNodes.size() == 0) { connectHandler.connect(voidActionListener); } else { @@ -213,35 +196,6 @@ public void ensureConnected(ActionListener voidActionListener) { } } - private void fetchShardsInternal(ClusterSearchShardsRequest searchShardsRequest, - final ActionListener listener) { - final DiscoveryNode node = getAnyConnectedNode(); - Transport.Connection connection = connectionManager.getConnection(node); - transportService.sendRequest(connection, ClusterSearchShardsAction.NAME, searchShardsRequest, TransportRequestOptions.EMPTY, - new TransportResponseHandler() { - - @Override - public ClusterSearchShardsResponse read(StreamInput in) throws IOException { - return new ClusterSearchShardsResponse(in); - } - - @Override - public void handleResponse(ClusterSearchShardsResponse clusterSearchShardsResponse) { - listener.onResponse(clusterSearchShardsResponse); - } - - @Override - public void handleException(TransportException e) { - listener.onFailure(e); - } - - @Override - public String executor() { - return ThreadPool.Names.SEARCH; - } - }); - } - /** * Collects all nodes on the connected cluster and returns / passes a nodeID to {@link DiscoveryNode} lookup function * that returns null if the node ID is not found. diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index d9fcb01df4ce8..7d19b2eebcb1d 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -24,8 +24,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; @@ -50,10 +48,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.Predicate; @@ -287,7 +283,7 @@ public Map groupIndices(IndicesOptions indicesOptions, String clusterAlias = entry.getKey(); List originalIndices = entry.getValue(); originalIndicesMap.put(clusterAlias, - new OriginalIndices(originalIndices.toArray(new String[originalIndices.size()]), indicesOptions)); + new OriginalIndices(originalIndices.toArray(new String[0]), indicesOptions)); } } } else { @@ -311,55 +307,6 @@ public Set getRegisteredRemoteClusterNames() { return remoteClusters.keySet(); } - public void collectSearchShards(IndicesOptions indicesOptions, String preference, String routing, - Map remoteIndicesByCluster, - ActionListener> listener) { - final CountDown responsesCountDown = new CountDown(remoteIndicesByCluster.size()); - final Map searchShardsResponses = new ConcurrentHashMap<>(); - final AtomicReference transportException = new AtomicReference<>(); - for (Map.Entry entry : remoteIndicesByCluster.entrySet()) { - final String clusterName = 
entry.getKey(); - RemoteClusterConnection remoteClusterConnection = remoteClusters.get(clusterName); - if (remoteClusterConnection == null) { - throw new IllegalArgumentException("no such remote cluster: " + clusterName); - } - final String[] indices = entry.getValue().indices(); - ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest(indices) - .indicesOptions(indicesOptions).local(true).preference(preference) - .routing(routing); - remoteClusterConnection.fetchSearchShards(searchShardsRequest, - new ActionListener() { - @Override - public void onResponse(ClusterSearchShardsResponse clusterSearchShardsResponse) { - searchShardsResponses.put(clusterName, clusterSearchShardsResponse); - if (responsesCountDown.countDown()) { - RemoteTransportException exception = transportException.get(); - if (exception == null) { - listener.onResponse(searchShardsResponses); - } else { - listener.onFailure(transportException.get()); - } - } - } - - @Override - public void onFailure(Exception e) { - RemoteTransportException exception = - new RemoteTransportException("error while communicating with remote cluster [" + clusterName + "]", e); - if (transportException.compareAndSet(null, exception) == false) { - exception = transportException.accumulateAndGet(exception, (previous, current) -> { - current.addSuppressed(previous); - return current; - }); - } - if (responsesCountDown.countDown()) { - listener.onFailure(exception); - } - } - }); - } - } - /** * Returns a connection to the given node on the given remote cluster * @throws IllegalArgumentException if the remote cluster is unknown @@ -376,6 +323,13 @@ void ensureConnected(String clusterAlias, ActionListener listener) { getRemoteClusterConnection(clusterAlias).ensureConnected(listener); } + /** + * Returns whether the cluster identified by the provided alias is configured to be skipped when unavailable + */ + public boolean isSkipUnavailable(String clusterAlias) { + return getRemoteClusterConnection(clusterAlias).isSkipUnavailable(); + } + public Transport.Connection getConnection(String cluster) { return getRemoteClusterConnection(cluster).getConnection(); } @@ -399,7 +353,7 @@ public void listenForUpdates(ClusterSettings clusterSettings) { clusterSettings.addAffixUpdateConsumer(SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE, this::updateSkipUnavailable, (alias, value) -> {}); } - synchronized void updateSkipUnavailable(String clusterAlias, Boolean skipUnavailable) { + private synchronized void updateSkipUnavailable(String clusterAlias, Boolean skipUnavailable) { RemoteClusterConnection remote = this.remoteClusters.get(clusterAlias); if (remote != null) { remote.updateSkipUnavailable(skipUnavailable); @@ -510,5 +464,4 @@ public Client getRemoteClusterClient(ThreadPool threadPool, String clusterAlias) Collection getConnections() { return remoteClusters.values(); } - } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 16ff4389d7c4a..1b99beee65e81 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.OriginalIndices; import 
org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; @@ -38,13 +39,19 @@ import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.NodeDisconnectedException; +import org.elasticsearch.transport.RemoteClusterConnectionTests; import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.RemoteClusterServiceTests; +import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; @@ -53,13 +60,22 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; import java.util.function.Function; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.startsWith; public class TransportSearchActionTests extends ESTestCase { @@ -304,41 +320,169 @@ public void close() { } } - public void testBuildClusters() { - OriginalIndices localIndices = randomBoolean() ? null : randomOriginalIndices(); - Map remoteIndices = new HashMap<>(); - Map searchShardsResponses = new HashMap<>(); - int numRemoteClusters = randomIntBetween(0, 10); - boolean onlySuccessful = randomBoolean(); - int localClusters = localIndices == null ? 
0 : 1; - int total = numRemoteClusters + localClusters; - int successful = localClusters; - int skipped = 0; - for (int i = 0; i < numRemoteClusters; i++) { - String cluster = randomAlphaOfLengthBetween(5, 10); - remoteIndices.put(cluster, randomOriginalIndices()); - if (onlySuccessful || randomBoolean()) { - //whatever response counts as successful as long as it's not the empty placeholder - searchShardsResponses.put(cluster, new ClusterSearchShardsResponse(null, null, null)); - successful++; - } else { - searchShardsResponses.put(cluster, ClusterSearchShardsResponse.EMPTY); - skipped++; - } - } - SearchResponse.Clusters clusters = TransportSearchAction.buildClusters(localIndices, remoteIndices, searchShardsResponses); - assertEquals(total, clusters.getTotal()); - assertEquals(successful, clusters.getSuccessful()); - assertEquals(skipped, clusters.getSkipped()); + private MockTransportService startTransport(String id, List knownNodes) { + return RemoteClusterConnectionTests.startTransport(id, knownNodes, Version.CURRENT, threadPool); } - private static OriginalIndices randomOriginalIndices() { - int numLocalIndices = randomIntBetween(0, 5); - String[] localIndices = new String[numLocalIndices]; - for (int i = 0; i < numLocalIndices; i++) { - localIndices[i] = randomAlphaOfLengthBetween(3, 10); + public void testCollectSearchShards() throws Exception { + int numClusters = randomIntBetween(2, 10); + MockTransportService[] mockTransportServices = new MockTransportService[numClusters]; + DiscoveryNode[] nodes = new DiscoveryNode[numClusters]; + Map remoteIndicesByCluster = new HashMap<>(); + Settings.Builder builder = Settings.builder(); + for (int i = 0; i < numClusters; i++) { + List knownNodes = new CopyOnWriteArrayList<>(); + MockTransportService remoteSeedTransport = startTransport("node_remote" + i, knownNodes); + mockTransportServices[i] = remoteSeedTransport; + DiscoveryNode remoteSeedNode = remoteSeedTransport.getLocalDiscoNode(); + knownNodes.add(remoteSeedNode); + nodes[i] = remoteSeedNode; + builder.put("cluster.remote.remote" + i + ".seeds", remoteSeedNode.getAddress().toString()); + remoteIndicesByCluster.put("remote" + i, new OriginalIndices(new String[]{"index"}, IndicesOptions.lenientExpandOpen())); + } + Settings settings = builder.build(); + + try { + try (MockTransportService service = MockTransportService.createNewService(settings, Version.CURRENT, threadPool, null)) { + service.start(); + service.acceptIncomingRequests(); + RemoteClusterService remoteClusterService = service.getRemoteClusterService(); + { + final CountDownLatch latch = new CountDownLatch(1); + AtomicReference> response = new AtomicReference<>(); + AtomicInteger skippedClusters = new AtomicInteger(); + TransportSearchAction.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, skippedClusters, + remoteIndicesByCluster, remoteClusterService, threadPool, + new LatchedActionListener<>(ActionListener.wrap(response::set, e -> fail("no failures expected")), latch)); + awaitLatch(latch, 5, TimeUnit.SECONDS); + assertEquals(0, skippedClusters.get()); + assertNotNull(response.get()); + Map map = response.get(); + assertEquals(numClusters, map.size()); + for (int i = 0; i < numClusters; i++) { + String clusterAlias = "remote" + i; + assertTrue(map.containsKey(clusterAlias)); + ClusterSearchShardsResponse shardsResponse = map.get(clusterAlias); + assertEquals(1, shardsResponse.getNodes().length); + } + } + { + final CountDownLatch latch = new CountDownLatch(1); + AtomicReference failure = new 
AtomicReference<>(); + AtomicInteger skippedClusters = new AtomicInteger(0); + TransportSearchAction.collectSearchShards(IndicesOptions.lenientExpandOpen(), "index_not_found", null, skippedClusters, + remoteIndicesByCluster, remoteClusterService, threadPool, + new LatchedActionListener<>(ActionListener.wrap(r -> fail("no response expected"), failure::set), latch)); + awaitLatch(latch, 5, TimeUnit.SECONDS); + assertEquals(0, skippedClusters.get()); + assertNotNull(failure.get()); + assertThat(failure.get(), instanceOf(RemoteTransportException.class)); + RemoteTransportException remoteTransportException = (RemoteTransportException) failure.get(); + assertEquals(RestStatus.NOT_FOUND, remoteTransportException.status()); + } + + int numDisconnectedClusters = randomIntBetween(1, numClusters); + Set disconnectedNodes = new HashSet<>(numDisconnectedClusters); + Set disconnectedNodesIndices = new HashSet<>(numDisconnectedClusters); + while (disconnectedNodes.size() < numDisconnectedClusters) { + int i = randomIntBetween(0, numClusters - 1); + if (disconnectedNodes.add(nodes[i])) { + assertTrue(disconnectedNodesIndices.add(i)); + } + } + + CountDownLatch disconnectedLatch = new CountDownLatch(numDisconnectedClusters); + RemoteClusterServiceTests.addConnectionListener(remoteClusterService, new TransportConnectionListener() { + @Override + public void onNodeDisconnected(DiscoveryNode node) { + if (disconnectedNodes.remove(node)) { + disconnectedLatch.countDown(); + } + } + }); + for (DiscoveryNode disconnectedNode : disconnectedNodes) { + service.addFailToSendNoConnectRule(disconnectedNode.getAddress()); + } + + { + final CountDownLatch latch = new CountDownLatch(1); + AtomicInteger skippedClusters = new AtomicInteger(0); + AtomicReference failure = new AtomicReference<>(); + TransportSearchAction.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, skippedClusters, + remoteIndicesByCluster, remoteClusterService, threadPool, + new LatchedActionListener<>(ActionListener.wrap(r -> fail("no response expected"), failure::set), latch)); + awaitLatch(latch, 5, TimeUnit.SECONDS); + assertEquals(0, skippedClusters.get()); + assertNotNull(failure.get()); + assertThat(failure.get(), instanceOf(RemoteTransportException.class)); + assertThat(failure.get().getMessage(), containsString("error while communicating with remote cluster [")); + assertThat(failure.get().getCause(), instanceOf(NodeDisconnectedException.class)); + } + + //setting skip_unavailable to true for all the disconnected clusters will make the request succeed again + for (int i : disconnectedNodesIndices) { + RemoteClusterServiceTests.updateSkipUnavailable(remoteClusterService, "remote" + i, true); + } + + { + final CountDownLatch latch = new CountDownLatch(1); + AtomicInteger skippedClusters = new AtomicInteger(0); + AtomicReference> response = new AtomicReference<>(); + TransportSearchAction.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, skippedClusters, + remoteIndicesByCluster, remoteClusterService, threadPool, + new LatchedActionListener<>(ActionListener.wrap(response::set, e -> fail("no failures expected")), latch)); + awaitLatch(latch, 5, TimeUnit.SECONDS); + assertNotNull(response.get()); + Map map = response.get(); + assertEquals(numClusters - disconnectedNodesIndices.size(), map.size()); + assertEquals(skippedClusters.get(), disconnectedNodesIndices.size()); + for (int i = 0; i < numClusters; i++) { + String clusterAlias = "remote" + i; + if (disconnectedNodesIndices.contains(i)) { + 
assertFalse(map.containsKey(clusterAlias)); + } else { + assertNotNull(map.get(clusterAlias)); + } + } + } + + //give transport service enough time to realize that the node is down, and to notify the connection listeners + //so that RemoteClusterConnection is left with no connected nodes, hence it will retry connecting next + assertTrue(disconnectedLatch.await(5, TimeUnit.SECONDS)); + + service.clearAllRules(); + if (randomBoolean()) { + for (int i : disconnectedNodesIndices) { + if (randomBoolean()) { + RemoteClusterServiceTests.updateSkipUnavailable(remoteClusterService, "remote" + i, true); + } + + } + } + { + final CountDownLatch latch = new CountDownLatch(1); + AtomicInteger skippedClusters = new AtomicInteger(0); + AtomicReference> response = new AtomicReference<>(); + TransportSearchAction.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, skippedClusters, + remoteIndicesByCluster, remoteClusterService, threadPool, + new LatchedActionListener<>(ActionListener.wrap(response::set, e -> fail("no failures expected")), latch)); + awaitLatch(latch, 5, TimeUnit.SECONDS); + assertEquals(0, skippedClusters.get()); + assertNotNull(response.get()); + Map map = response.get(); + assertEquals(numClusters, map.size()); + for (int i = 0; i < numClusters; i++) { + String clusterAlias = "remote" + i; + assertTrue(map.containsKey(clusterAlias)); + assertNotNull(map.get(clusterAlias)); + } + } + assertEquals(0, service.getConnectionManager().size()); + } + } finally { + for (MockTransportService mockTransportService : mockTransportServices) { + mockTransportService.close(); + } } - return new OriginalIndices(localIndices, IndicesOptions.fromOptions(randomBoolean(), - randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java new file mode 100644 index 0000000000000..1a6eaff9e5a2e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.transport;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.LatchedActionListener;
+import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
+import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
+public class RemoteClusterAwareClientTests extends ESTestCase {
+
+    private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
+
+    @Override
+    public void tearDown() throws Exception {
+        super.tearDown();
+        ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
+    }
+
+    private MockTransportService startTransport(String id, List<DiscoveryNode> knownNodes) {
+        return RemoteClusterConnectionTests.startTransport(id, knownNodes, Version.CURRENT, threadPool);
+    }
+
+    public void testSearchShards() throws Exception {
+        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
+        try (MockTransportService seedTransport = startTransport("seed_node", knownNodes);
+             MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes)) {
+            knownNodes.add(seedTransport.getLocalDiscoNode());
+            knownNodes.add(discoverableTransport.getLocalDiscoNode());
+            Collections.shuffle(knownNodes, random());
+            Settings.Builder builder = Settings.builder();
+            builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalDiscoNode().getAddress().toString());
+            try (MockTransportService service = MockTransportService.createNewService(builder.build(), Version.CURRENT, threadPool, null)) {
+                service.start();
+                service.acceptIncomingRequests();
+
+                try (RemoteClusterAwareClient client = new RemoteClusterAwareClient(Settings.EMPTY, threadPool, service, "cluster1")) {
+                    SearchRequest request = new SearchRequest("test-index");
+                    CountDownLatch responseLatch = new CountDownLatch(1);
+                    AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
+                    ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
+                        .indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
+                        .routing(request.routing());
+                    client.admin().cluster().searchShards(searchShardsRequest,
+                        new LatchedActionListener<>(ActionListener.wrap(reference::set, e -> fail("no failures expected")), responseLatch));
+                    responseLatch.await();
+                    assertNotNull(reference.get());
+                    ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get();
+                    assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes()));
+                }
+            }
+        }
+    }
+
+    public void testSearchShardsThreadContextHeader() {
+        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
+        try (MockTransportService seedTransport = startTransport("seed_node", knownNodes);
+             MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes)) {
+            knownNodes.add(seedTransport.getLocalDiscoNode());
+            knownNodes.add(discoverableTransport.getLocalDiscoNode());
+            Collections.shuffle(knownNodes, random());
+            Settings.Builder builder = Settings.builder();
+            builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalDiscoNode().getAddress().toString());
+            try (MockTransportService service = MockTransportService.createNewService(builder.build(), Version.CURRENT, threadPool, null)) {
+                service.start();
+                service.acceptIncomingRequests();
+
+                try (RemoteClusterAwareClient client = new RemoteClusterAwareClient(Settings.EMPTY, threadPool, service, "cluster1")) {
+                    SearchRequest request = new SearchRequest("test-index");
+                    int numThreads = 10;
+                    ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
+                    for (int i = 0; i < numThreads; i++) {
+                        final String threadId = Integer.toString(i);
+                        executorService.submit(() -> {
+                            ThreadContext threadContext = seedTransport.threadPool.getThreadContext();
+                            threadContext.putHeader("threadId", threadId);
+                            AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
+                            final ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
+                                .indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
+                                .routing(request.routing());
+                            CountDownLatch responseLatch = new CountDownLatch(1);
+                            client.admin().cluster().searchShards(searchShardsRequest,
+                                new LatchedActionListener<>(ActionListener.wrap(
+                                    resp -> {
+                                        reference.set(resp);
+                                        assertEquals(threadId, seedTransport.threadPool.getThreadContext().getHeader("threadId"));
+                                    },
+                                    e -> fail("no failures expected")), responseLatch));
+                            try {
+                                responseLatch.await();
+                            } catch (InterruptedException e) {
+                                throw new RuntimeException(e);
+                            }
+                            assertNotNull(reference.get());
+                            ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get();
+                            assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes()));
+                        });
+                    }
+                    ThreadPool.terminate(executorService, 5, TimeUnit.SECONDS);
+                }
+            }
+        }
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
index 02e701ed4bc86..308d330d54f61 100644
--- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java
@@ -29,7 +29,6 @@
 import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
-import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -43,7 +42,6 @@
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.CancellableThreads;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.core.internal.io.IOUtils;
@@ -558,7 +556,7 @@ public void run() {
         }
     }
 
-    private List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes(final DiscoveryNode... seedNodes) {
+    private static List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes(final DiscoveryNode... seedNodes) {
         if (seedNodes.length == 0) {
             return Collections.emptyList();
         } else if (seedNodes.length == 1) {
@@ -570,205 +568,6 @@ private List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes(final DiscoveryNo
         }
     }
 
-    public void testFetchShards() throws Exception {
-        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
-        try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
-             MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
-            DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
-            knownNodes.add(seedTransport.getLocalDiscoNode());
-            knownNodes.add(discoverableTransport.getLocalDiscoNode());
-            Collections.shuffle(knownNodes, random());
-            try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
-                service.start();
-                service.acceptIncomingRequests();
-                final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode);
-                try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
-                    seedNodes, service, Integer.MAX_VALUE, n -> true, null)) {
-                    if (randomBoolean()) {
-                        updateSeedNodes(connection, seedNodes);
-                    }
-                    if (randomBoolean()) {
-                        connection.updateSkipUnavailable(randomBoolean());
-                    }
-                    SearchRequest request = new SearchRequest("test-index");
-                    CountDownLatch responseLatch = new CountDownLatch(1);
-                    AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
-                    AtomicReference<Exception> failReference = new AtomicReference<>();
-                    ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
-                        .indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
-                        .routing(request.routing());
-                    connection.fetchSearchShards(searchShardsRequest,
-                        new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
-                    responseLatch.await();
-                    assertNull(failReference.get());
-                    assertNotNull(reference.get());
-                    ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get();
-                    assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes()));
-                    assertTrue(connection.assertNoRunningConnections());
-                }
-            }
-        }
-    }
-
-    public void testFetchShardsThreadContextHeader() throws Exception {
-        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
-        try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
-             MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) {
-            DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
-            knownNodes.add(seedTransport.getLocalDiscoNode());
-            knownNodes.add(discoverableTransport.getLocalDiscoNode());
-            Collections.shuffle(knownNodes, random());
-            try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
-                service.start();
-                service.acceptIncomingRequests();
-                final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes = seedNodes(seedNode);
-                try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
-                    seedNodes, service, Integer.MAX_VALUE, n -> true, null)) {
-                    SearchRequest request = new SearchRequest("test-index");
-                    Thread[] threads = new Thread[10];
-                    for (int i = 0; i < threads.length; i++) {
-                        final String threadId = Integer.toString(i);
-                        threads[i] = new Thread(() -> {
-                            ThreadContext threadContext = seedTransport.threadPool.getThreadContext();
-                            threadContext.putHeader("threadId", threadId);
-                            AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
-                            AtomicReference<Exception> failReference = new AtomicReference<>();
-                            final ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
-                                .indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
-                                .routing(request.routing());
-                            CountDownLatch responseLatch = new CountDownLatch(1);
-                            connection.fetchSearchShards(searchShardsRequest,
-                                new LatchedActionListener<>(ActionListener.wrap(
-                                    resp -> {
-                                        reference.set(resp);
-                                        assertEquals(threadId, seedTransport.threadPool.getThreadContext().getHeader("threadId"));
-                                    },
-                                    failReference::set), responseLatch));
-                            try {
-                                responseLatch.await();
-                            } catch (InterruptedException e) {
-                                throw new RuntimeException(e);
-                            }
-                            assertNull(failReference.get());
-                            assertNotNull(reference.get());
-                            ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get();
-                            assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes()));
-                        });
-                    }
-                    for (int i = 0; i < threads.length; i++) {
-                        threads[i].start();
-                    }
-
-                    for (int i = 0; i < threads.length; i++) {
-                        threads[i].join();
-                    }
-                    assertTrue(connection.assertNoRunningConnections());
-                }
-            }
-        }
-    }
-
-    public void testFetchShardsSkipUnavailable() throws Exception {
-        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
-        try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT)) {
-            DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
-            knownNodes.add(seedNode);
-            try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
-                service.start();
-                service.acceptIncomingRequests();
-                try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
-                    seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) {
-                    ConnectionManager connectionManager = connection.getConnectionManager();
-
-                    SearchRequest request = new SearchRequest("test-index");
-                    ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
-                        .indicesOptions(request.indicesOptions()).local(true).preference(request.preference())
-                        .routing(request.routing());
-                    {
-                        CountDownLatch responseLatch = new CountDownLatch(1);
-                        AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
-                        AtomicReference<Exception> failReference = new AtomicReference<>();
-                        connection.fetchSearchShards(searchShardsRequest,
-                            new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
-                        assertTrue(responseLatch.await(10, TimeUnit.SECONDS));
-                        assertNull(failReference.get());
-                        assertNotNull(reference.get());
-                        ClusterSearchShardsResponse response = reference.get();
-                        assertTrue(response != ClusterSearchShardsResponse.EMPTY);
-                        assertEquals(knownNodes, Arrays.asList(response.getNodes()));
-                    }
-
-                    CountDownLatch disconnectedLatch = new CountDownLatch(1);
-                    connectionManager.addListener(new TransportConnectionListener() {
-                        @Override
-                        public void onNodeDisconnected(DiscoveryNode node) {
-                            if (node.equals(seedNode)) {
-                                disconnectedLatch.countDown();
-                            }
-                        }
-                    });
-
-                    service.addFailToSendNoConnectRule(seedTransport);
-
-                    if (randomBoolean()) {
-                        connection.updateSkipUnavailable(false);
-                    }
-                    {
-                        CountDownLatch responseLatch = new CountDownLatch(1);
-                        AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
-                        AtomicReference<Exception> failReference = new AtomicReference<>();
-                        connection.fetchSearchShards(searchShardsRequest,
-                            new LatchedActionListener<>(ActionListener.wrap((s) -> {
-                                reference.set(s);
-                            }, failReference::set), responseLatch));
-                        assertTrue(responseLatch.await(10, TimeUnit.SECONDS));
-                        assertNotNull(failReference.get());
-                        assertNull(reference.get());
-                        assertThat(failReference.get(), instanceOf(TransportException.class));
-                    }
-
-                    connection.updateSkipUnavailable(true);
-                    {
-                        CountDownLatch responseLatch = new CountDownLatch(1);
-                        AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
-                        AtomicReference<Exception> failReference = new AtomicReference<>();
-                        connection.fetchSearchShards(searchShardsRequest,
-                            new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
-                        assertTrue(responseLatch.await(10, TimeUnit.SECONDS));
-                        assertNull(failReference.get());
-                        assertNotNull(reference.get());
-                        ClusterSearchShardsResponse response = reference.get();
-                        assertTrue(response == ClusterSearchShardsResponse.EMPTY);
-                    }
-
-                    //give transport service enough time to realize that the node is down, and to notify the connection listeners
-                    //so that RemoteClusterConnection is left with no connected nodes, hence it will retry connecting next
-                    assertTrue(disconnectedLatch.await(10, TimeUnit.SECONDS));
-
-                    if (randomBoolean()) {
-                        connection.updateSkipUnavailable(false);
-                    }
-
-                    service.clearAllRules();
-                    //check that we reconnect once the node is back up
-                    {
-                        CountDownLatch responseLatch = new CountDownLatch(1);
-                        AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
-                        AtomicReference<Exception> failReference = new AtomicReference<>();
-                        connection.fetchSearchShards(searchShardsRequest,
-                            new LatchedActionListener<>(ActionListener.wrap(reference::set, failReference::set), responseLatch));
-                        assertTrue(responseLatch.await(10, TimeUnit.SECONDS));
-                        assertNull(failReference.get());
-                        assertNotNull(reference.get());
-                        ClusterSearchShardsResponse response = reference.get();
-                        assertTrue(response != ClusterSearchShardsResponse.EMPTY);
-                        assertEquals(knownNodes, Arrays.asList(response.getNodes()));
-                    }
-                }
-            }
-        }
-    }
-
     public void testTriggerUpdatesConcurrently() throws IOException, InterruptedException {
         List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
         try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT);
diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
index d5671eec21961..60f3ece86bcbe 100644
--- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
@@ -20,9 +20,7 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.LatchedActionListener;
 import org.elasticsearch.action.OriginalIndices;
-import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.Strings;
@@ -33,7 +31,6 @@
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.core.internal.io.IOUtils;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.test.transport.MockTransportService;
@@ -60,9 +57,7 @@
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch;
 import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;
@@ -711,172 +706,6 @@ public void onFailure(Exception e) {
         }
     }
 
-    public void testCollectSearchShards() throws Exception {
-        int numClusters = randomIntBetween(2, 10);
-        MockTransportService[] mockTransportServices = new MockTransportService[numClusters];
-        DiscoveryNode[] nodes = new DiscoveryNode[numClusters];
-        Map<String, OriginalIndices> remoteIndicesByCluster = new HashMap<>();
-        Settings.Builder builder = Settings.builder();
-        for (int i = 0; i < numClusters; i++) {
-            List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
-            MockTransportService remoteSeedTransport = startTransport("node_remote" + i, knownNodes, Version.CURRENT);
-            mockTransportServices[i] = remoteSeedTransport;
-            DiscoveryNode remoteSeedNode = remoteSeedTransport.getLocalDiscoNode();
-            knownNodes.add(remoteSeedNode);
-            nodes[i] = remoteSeedNode;
-            builder.put("cluster.remote.remote" + i + ".seeds", remoteSeedNode.getAddress().toString());
-            remoteIndicesByCluster.put("remote" + i, new OriginalIndices(new String[]{"index"}, IndicesOptions.lenientExpandOpen()));
-        }
-        Settings settings = builder.build();
-
-        try {
-            try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) {
-                service.start();
-                service.acceptIncomingRequests();
-                try (RemoteClusterService remoteClusterService = new RemoteClusterService(settings, service)) {
-                    assertFalse(remoteClusterService.isCrossClusterSearchEnabled());
-                    remoteClusterService.initializeRemoteClusters();
-                    assertTrue(remoteClusterService.isCrossClusterSearchEnabled());
-                    {
-                        final CountDownLatch latch = new CountDownLatch(1);
-                        AtomicReference<Map<String, ClusterSearchShardsResponse>> response = new AtomicReference<>();
-                        AtomicReference<Exception> failure = new AtomicReference<>();
-                        remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, remoteIndicesByCluster,
-                            new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch));
-                        awaitLatch(latch, 5, TimeUnit.SECONDS);
-                        assertNull(failure.get());
-                        assertNotNull(response.get());
-                        Map<String, ClusterSearchShardsResponse> map = response.get();
-                        assertEquals(numClusters, map.size());
-                        for (int i = 0; i < numClusters; i++) {
-                            String clusterAlias = "remote" + i;
-                            assertTrue(map.containsKey(clusterAlias));
-                            ClusterSearchShardsResponse shardsResponse = map.get(clusterAlias);
-                            assertEquals(1, shardsResponse.getNodes().length);
-                        }
-                    }
-                    {
-                        final CountDownLatch latch = new CountDownLatch(1);
-                        AtomicReference<Map<String, ClusterSearchShardsResponse>> response = new AtomicReference<>();
-                        AtomicReference<Exception> failure = new AtomicReference<>();
-                        remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), "index_not_found",
-                            null, remoteIndicesByCluster,
-                            new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch));
-                        awaitLatch(latch, 5, TimeUnit.SECONDS);
-                        assertNull(response.get());
-                        assertNotNull(failure.get());
-                        assertThat(failure.get(), instanceOf(RemoteTransportException.class));
-                        RemoteTransportException remoteTransportException = (RemoteTransportException) failure.get();
-                        assertEquals(RestStatus.NOT_FOUND, remoteTransportException.status());
-                    }
-                    int numDisconnectedClusters = randomIntBetween(1, numClusters);
-                    Set<DiscoveryNode> disconnectedNodes = new HashSet<>(numDisconnectedClusters);
-                    Set<Integer> disconnectedNodesIndices = new HashSet<>(numDisconnectedClusters);
-                    while(disconnectedNodes.size() < numDisconnectedClusters) {
-                        int i = randomIntBetween(0, numClusters - 1);
-                        if (disconnectedNodes.add(nodes[i])) {
-                            assertTrue(disconnectedNodesIndices.add(i));
-                        }
-                    }
-
-                    CountDownLatch disconnectedLatch = new CountDownLatch(numDisconnectedClusters);
-                    for (RemoteClusterConnection connection : remoteClusterService.getConnections()) {
-                        connection.getConnectionManager().addListener(new TransportConnectionListener() {
-                            @Override
-                            public void onNodeDisconnected(DiscoveryNode node) {
-                                if (disconnectedNodes.remove(node)) {
-                                    disconnectedLatch.countDown();
-                                }
-                            }
-                        });
-                    }
-
-                    for (DiscoveryNode disconnectedNode : disconnectedNodes) {
-                        service.addFailToSendNoConnectRule(disconnectedNode.getAddress());
-                    }
-
-                    {
-                        final CountDownLatch latch = new CountDownLatch(1);
-                        AtomicReference<Map<String, ClusterSearchShardsResponse>> response = new AtomicReference<>();
-                        AtomicReference<Exception> failure = new AtomicReference<>();
-                        remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, remoteIndicesByCluster,
-                            new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch));
-                        awaitLatch(latch, 5, TimeUnit.SECONDS);
-                        assertNull(response.get());
-                        assertNotNull(failure.get());
-                        assertThat(failure.get(), instanceOf(RemoteTransportException.class));
-                        assertThat(failure.get().getMessage(), containsString("error while communicating with remote cluster ["));
-                        assertThat(failure.get().getCause(), instanceOf(NodeDisconnectedException.class));
-                    }
-
-                    //setting skip_unavailable to true for all the disconnected clusters will make the request succeed again
-                    for (int i : disconnectedNodesIndices) {
-                        remoteClusterService.updateSkipUnavailable("remote" + i, true);
-                    }
-                    {
-                        final CountDownLatch latch = new CountDownLatch(1);
-                        AtomicReference<Map<String, ClusterSearchShardsResponse>> response = new AtomicReference<>();
-                        AtomicReference<Exception> failure = new AtomicReference<>();
-                        remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, remoteIndicesByCluster,
-                            new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch));
-                        awaitLatch(latch, 5, TimeUnit.SECONDS);
-                        assertNull(failure.get());
-                        assertNotNull(response.get());
-                        Map<String, ClusterSearchShardsResponse> map = response.get();
-                        assertEquals(numClusters, map.size());
-                        for (int i = 0; i < numClusters; i++) {
-                            String clusterAlias = "remote" + i;
-                            assertTrue(map.containsKey(clusterAlias));
-                            ClusterSearchShardsResponse shardsResponse = map.get(clusterAlias);
-                            if (disconnectedNodesIndices.contains(i)) {
-                                assertTrue(shardsResponse == ClusterSearchShardsResponse.EMPTY);
-                            } else {
-                                assertTrue(shardsResponse != ClusterSearchShardsResponse.EMPTY);
-                            }
-                        }
-                    }
-
-                    //give transport service enough time to realize that the node is down, and to notify the connection listeners
-                    //so that RemoteClusterConnection is left with no connected nodes, hence it will retry connecting next
-                    assertTrue(disconnectedLatch.await(5, TimeUnit.SECONDS));
-
-                    service.clearAllRules();
-                    if (randomBoolean()) {
-                        for (int i : disconnectedNodesIndices) {
-                            if (randomBoolean()) {
-                                remoteClusterService.updateSkipUnavailable("remote" + i, true);
-                            }
-
-                        }
-                    }
-                    {
-                        final CountDownLatch latch = new CountDownLatch(1);
-                        AtomicReference<Map<String, ClusterSearchShardsResponse>> response = new AtomicReference<>();
-                        AtomicReference<Exception> failure = new AtomicReference<>();
-                        remoteClusterService.collectSearchShards(IndicesOptions.lenientExpandOpen(), null, null, remoteIndicesByCluster,
-                            new LatchedActionListener<>(ActionListener.wrap(response::set, failure::set), latch));
-                        awaitLatch(latch, 5, TimeUnit.SECONDS);
-                        assertNull(failure.get());
-                        assertNotNull(response.get());
-                        Map<String, ClusterSearchShardsResponse> map = response.get();
-                        assertEquals(numClusters, map.size());
-                        for (int i = 0; i < numClusters; i++) {
-                            String clusterAlias = "remote" + i;
-                            assertTrue(map.containsKey(clusterAlias));
-                            ClusterSearchShardsResponse shardsResponse = map.get(clusterAlias);
-                            assertNotSame(ClusterSearchShardsResponse.EMPTY, shardsResponse);
-                        }
-                    }
-                    assertEquals(0, service.getConnectionManager().size());
-                }
-            }
-        } finally {
-            for (MockTransportService mockTransportService : mockTransportServices) {
-                mockTransportService.close();
-            }
-        }
-    }
-
     public void testRemoteClusterSkipIfDisconnectedSetting() {
         {
             Settings settings = Settings.builder()
@@ -1079,7 +908,7 @@ public void testRemoteClusterWithProxy() throws Exception {
         }
     }
 
-    private void updateRemoteCluster(RemoteClusterService service, String clusterAlias, List<String> addresses, String proxyAddress)
+    private static void updateRemoteCluster(RemoteClusterService service, String clusterAlias, List<String> addresses, String proxyAddress)
         throws Exception {
         CountDownLatch latch = new CountDownLatch(1);
         AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
@@ -1093,4 +922,40 @@ private void updateRemoteCluster(RemoteClusterService service, String clusterAli
             throw exceptionAtomicReference.get();
         }
     }
+
+    public static void updateSkipUnavailable(RemoteClusterService service, String clusterAlias, boolean skipUnavailable) {
+        RemoteClusterConnection connection = service.getRemoteClusterConnection(clusterAlias);
+        connection.updateSkipUnavailable(skipUnavailable);
+    }
+
+    public static void addConnectionListener(RemoteClusterService service, TransportConnectionListener listener) {
+        for (RemoteClusterConnection connection : service.getConnections()) {
+            ConnectionManager connectionManager = connection.getConnectionManager();
+            connectionManager.addListener(listener);
+        }
+    }
+
+    public void testSkipUnavailable() {
+        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
+        try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT)) {
+            DiscoveryNode seedNode = seedTransport.getLocalDiscoNode();
+            knownNodes.add(seedNode);
+            Settings.Builder builder = Settings.builder();
+            builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalDiscoNode().getAddress().toString());
+            try (MockTransportService service = MockTransportService.createNewService(builder.build(), Version.CURRENT, threadPool, null)) {
+                service.start();
+                service.acceptIncomingRequests();
+
+                assertFalse(service.getRemoteClusterService().isSkipUnavailable("cluster1"));
+
+                if (randomBoolean()) {
+                    updateSkipUnavailable(service.getRemoteClusterService(), "cluster1", false);
+                    assertFalse(service.getRemoteClusterService().isSkipUnavailable("cluster1"));
+                }
+
+                updateSkipUnavailable(service.getRemoteClusterService(), "cluster1", true);
+                assertTrue(service.getRemoteClusterService().isSkipUnavailable("cluster1"));
+            }
+        }
+    }
 }

From 6a5d9d942a7537a17e7ef146c6fd0616dc3a815e Mon Sep 17 00:00:00 2001
From: David Roberts
Date: Wed, 23 Jan 2019 13:50:31 +0000
Subject: [PATCH 24/24] [TEST] Mute MlMappingsUpgradeIT testMappingsUpgrade

Due to https://github.com/elastic/elasticsearch/issues/37763
---
 .../java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java
index 5602f14ef2267..ca41afe6c39da 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java
@@ -38,6 +38,7 @@ protected Collection<String> templatesToWaitFor() {
      * The purpose of this test is to ensure that when a job is open through a rolling upgrade we upgrade the results
      * index mappings when it is assigned to an upgraded node even if no other ML endpoint is called after the upgrade
      */
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37763")
    public void testMappingsUpgrade() throws Exception {
        switch (CLUSTER_TYPE) {
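
A note on the test idiom that recurs throughout the patches above: each test hands the async API a listener wrapped in a LatchedActionListener, captures the response in an AtomicReference, and blocks on a CountDownLatch until the callback fires. The following is a minimal standalone sketch of that pattern; the class and method names (LatchedListenerSketch, latched) are illustrative only and not part of the Elasticsearch codebase.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;

public class LatchedListenerSketch {

    // Wraps a value consumer so the waiting test thread is released once the
    // asynchronous response has been captured. This mirrors, in spirit, what
    // LatchedActionListener does around an ActionListener in the tests above.
    static <T> Consumer<T> latched(AtomicReference<T> ref, CountDownLatch latch) {
        return value -> {
            ref.set(value);     // capture the async response for later assertions
            latch.countDown();  // unblock the thread waiting in await()
        };
    }

    public static void main(String[] args) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<String> ref = new AtomicReference<>();
        Consumer<String> listener = latched(ref, latch);

        // Simulate an asynchronous transport callback arriving on another thread.
        new Thread(() -> listener.accept("shard response")).start();

        // The caller blocks here until the callback has fired, or times out;
        // the bounded wait is what keeps a lost response from hanging the test.
        if (latch.await(10, TimeUnit.SECONDS)) {
            System.out.println("got: " + ref.get());
        }
    }
}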