diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
index 366060835d891..50a0c85c041c8 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
@@ -42,6 +42,7 @@
 import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import org.elasticsearch.search.internal.SearchContext;
+import org.joda.time.DateTimeZone;

 import java.io.IOException;
 import java.util.Arrays;
@@ -53,7 +54,7 @@ public class AutoDateHistogramAggregationBuilder

     public static final String NAME = "auto_date_histogram";

-    public static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets");
+    private static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets");

     private static final ObjectParser<AutoDateHistogramAggregationBuilder, Void> PARSER;
     static {
@@ -63,6 +64,29 @@ public class AutoDateHistogramAggregationBuilder
         PARSER.declareInt(AutoDateHistogramAggregationBuilder::setNumBuckets, NUM_BUCKETS_FIELD);
     }

+    /**
+     *
+     * Build roundings, computed dynamically as roundings are time zone dependent.
+     * The current implementation probably should not be invoked in a tight loop.
+     * @return Array of RoundingInfo
+     */
+    static RoundingInfo[] buildRoundings(DateTimeZone timeZone) {
+        RoundingInfo[] roundings = new RoundingInfo[6];
+        roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
+            1000L, 1, 5, 10, 30);
+        roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
+            60 * 1000L, 1, 5, 10, 30);
+        roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
+            60 * 60 * 1000L, 1, 3, 12);
+        roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone),
+            24 * 60 * 60 * 1000L, 1, 7);
+        roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone),
+            30 * 24 * 60 * 60 * 1000L, 1, 3);
+        roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone),
+            365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100);
+        return roundings;
+    }
+
     public static AutoDateHistogramAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
         return PARSER.parse(parser, new AutoDateHistogramAggregationBuilder(aggregationName), null);
     }
@@ -116,14 +140,7 @@ public int getNumBuckets() {
     @Override
     protected ValuesSourceAggregatorFactory<Numeric, ?> innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        RoundingInfo[] roundings = new RoundingInfo[6];
-        roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE), 1000L, 1, 5, 10, 30);
-        roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR), 60 * 1000L, 1, 5, 10, 30);
-        roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY), 60 * 60 * 1000L, 1, 3, 12);
-        roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH), 24 * 60 * 60 * 1000L, 1, 7);
-        roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR), 30 * 24 * 60 * 60 * 1000L, 1, 3);
-        roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY), 365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100);
-
+        RoundingInfo[] roundings = buildRoundings(timeZone());
         int maxRoundingInterval = Arrays.stream(roundings,0, roundings.length-1)
             .map(rounding -> rounding.innerIntervals)
             .flatMapToInt(Arrays::stream)
@@ -139,10 +156,10 @@ public int getNumBuckets() {
         return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData);
     }

-    private Rounding createRounding(DateTimeUnit interval) {
+    private static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) {
         Rounding.Builder tzRoundingBuilder = Rounding.builder(interval);
-        if (timeZone() != null) {
-            tzRoundingBuilder.timeZone(timeZone());
+        if (timeZone != null) {
+            tzRoundingBuilder.timeZone(timeZone);
         }
         Rounding rounding = tzRoundingBuilder.build();
         return rounding;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
index 27c195cbdae75..6a78ca6724988 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
@@ -447,7 +447,8 @@ private BucketReduceResult addEmptyBuckets(BucketReduceResult currentResult, Red
         return new BucketReduceResult(list, roundingInfo, roundingIdx);
     }

-    private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, RoundingInfo[] roundings) {
+    private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx,
+                                       RoundingInfo[] roundings) {
         if (roundingIdx == roundings.length - 1) {
             return roundingIdx;
         }
@@ -509,7 +510,8 @@ public InternalAggregation doReduce(List<InternalAggregation> aggregations, Redu
             pipelineAggregators(), getMetaData());
     }

-    private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult, ReduceContext reduceContext) {
+    private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult,
+                                                            ReduceContext reduceContext) {
         List<Bucket> buckets = reducedBucketsResult.buckets;
         RoundingInfo roundingInfo = reducedBucketsResult.roundingInfo;
         int roundingIdx = reducedBucketsResult.roundingIdx;
@@ -539,7 +541,7 @@ private BucketReduceResult mergeConsecutiveBuckets(List<Bucket> reducedBuckets,
                 key = roundingInfo.rounding.round(bucket.key);
             }
             reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1);
-            sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations));
+            sameKeyedBuckets.add(new Bucket(Math.round(key), bucket.docCount, format, bucket.aggregations));
         }
         if (sameKeyedBuckets.isEmpty() == false) {
             reduceContext.consumeBucketsAndMaybeBreak(1);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
index 96811ce424c91..a14fca63154d7 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
@@ -20,8 +20,6 @@
 package org.elasticsearch.search.aggregations.bucket.histogram;

 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
@@ -51,14 +49,6 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
     public void setUp() throws Exception {
         super.setUp();
         format = randomNumericDocValueFormat();
-
-        roundingInfos = new RoundingInfo[6];
-        roundingInfos[0] = new RoundingInfo(Rounding.builder(DateTimeUnit.SECOND_OF_MINUTE).build(), 1, 5, 10, 30);
-        roundingInfos[1] = new RoundingInfo(Rounding.builder(DateTimeUnit.MINUTES_OF_HOUR).build(), 1, 5, 10, 30);
-        roundingInfos[2] = new RoundingInfo(Rounding.builder(DateTimeUnit.HOUR_OF_DAY).build(), 1, 3, 12);
-        roundingInfos[3] = new RoundingInfo(Rounding.builder(DateTimeUnit.DAY_OF_MONTH).build(), 1, 7);
-        roundingInfos[4] = new RoundingInfo(Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).build(), 1, 3);
-        roundingInfos[5] = new RoundingInfo(Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).build(), 1, 10, 20, 50, 100);
     }

     @Override
@@ -66,6 +56,8 @@ protected InternalAutoDateHistogram createTestInstance(String name,
                                                            List<PipelineAggregator> pipelineAggregators,
                                                            Map<String, Object> metaData,
                                                            InternalAggregations aggregations) {
+
+        roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null);
         int nbBuckets = randomNumberOfBuckets();
         int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1);
         List<InternalAutoDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets);
@@ -81,6 +73,7 @@ protected InternalAutoDateHistogram createTestInstance(String name,
         InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList());
         BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations);
+
         return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
     }

@@ -92,13 +85,50 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
         int roundingIdx = reduced.getBucketInfo().roundingIdx;
-        Map<Long, Long> expectedCounts = new TreeMap<>();
-        for (Histogram histogram : inputs) {
+        RoundingInfo roundingInfo = roundingInfos[roundingIdx];
+
+        long lowest = Long.MAX_VALUE;
+        long highest = 0;
+        for (InternalAutoDateHistogram histogram : inputs) {
             for (Histogram.Bucket bucket : histogram.getBuckets()) {
-                expectedCounts.compute(roundingInfos[roundingIdx].rounding.round(((DateTime) bucket.getKey()).getMillis()),
-                    (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
+                long bucketKey = ((DateTime) bucket.getKey()).getMillis();
+                if (bucketKey < lowest) {
+                    lowest = bucketKey;
+                }
+                if (bucketKey > highest) {
+                    highest = bucketKey;
+                }
+            }
+        }
+        long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis();
+        long innerIntervalToUse = 0;
+        for (int interval : roundingInfo.innerIntervals) {
+            if (normalizedDuration / interval < maxNumberOfBuckets()) {
+                innerIntervalToUse = interval;
+            }
+        }
+        Map<Long, Long> expectedCounts = new TreeMap<>();
+        long intervalInMillis = innerIntervalToUse*roundingInfo.getRoughEstimateDurationMillis();
+        for (long keyForBucket = roundingInfo.rounding.round(lowest);
+             keyForBucket <= highest;
+             keyForBucket = keyForBucket + intervalInMillis) {
+            expectedCounts.put(keyForBucket, 0L);
+
+            for (InternalAutoDateHistogram histogram : inputs) {
+                for (Histogram.Bucket bucket : histogram.getBuckets()) {
+                    long bucketKey = ((DateTime) bucket.getKey()).getMillis();
+                    long roundedBucketKey = roundingInfo.rounding.round(bucketKey);
+                    if (roundedBucketKey >= keyForBucket
+                        && roundedBucketKey < keyForBucket + intervalInMillis) {
+                        long count = bucket.getDocCount();
+                        expectedCounts.compute(keyForBucket,
+                            (key, oldValue) -> (oldValue == null ? 0 : oldValue) + count);
+                    }
+                }
+            }
+        }
+
         Map<Long, Long> actualCounts = new TreeMap<>();
         for (Histogram.Bucket bucket : reduced.getBuckets()) {
             actualCounts.compute(((DateTime) bucket.getKey()).getMillis(),
@@ -117,12 +147,6 @@ protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
         return ParsedAutoDateHistogram.class;
     }

-    @Override
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215")
-    public void testReduceRandom() {
-        super.testReduceRandom();
-    }
-
     @Override
     protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) {
         String name = instance.getName();
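
Reviewer sketch (not part of the patch): the reworked assertReduced above derives its expected bucket layout from the selected rounding's rough duration and inner intervals. The self-contained trace below walks that arithmetic with made-up inputs; the class name, the 45-minute key span, and the bucket target of 10 are hypothetical, while the one-minute rough duration and the {1, 5, 10, 30} inner intervals mirror the minute-level entry registered by buildRoundings.

// Standalone trace (hypothetical values) of the expected-interval arithmetic used by assertReduced.
public class RoundingIntervalTrace {
    public static void main(String[] args) {
        long roughEstimateDurationMillis = 60 * 1000L;   // minute-level rounding, as in roundings[1]
        int[] innerIntervals = {1, 5, 10, 30};           // inner intervals registered for that rounding
        int maxNumberOfBuckets = 10;                     // assumed bucket target for this example

        long lowest = 0L;                                // hypothetical earliest bucket key (epoch millis)
        long highest = 45 * 60 * 1000L;                  // hypothetical latest bucket key, 45 minutes later

        // How many base units (minutes here) the observed key range spans.
        long normalizedDuration = (highest - lowest) / roughEstimateDurationMillis; // 45

        // The selection loop keeps the last inner interval whose bucket count stays below the
        // target, i.e. the largest qualifying multiple: 5, 10 and 30 all qualify here, so 30 wins.
        long innerIntervalToUse = 0;
        for (int interval : innerIntervals) {
            if (normalizedDuration / interval < maxNumberOfBuckets) {
                innerIntervalToUse = interval;
            }
        }

        // Expected keys are then laid out every innerIntervalToUse * roughEstimateDurationMillis.
        long intervalInMillis = innerIntervalToUse * roughEstimateDurationMillis;
        System.out.println("innerIntervalToUse=" + innerIntervalToUse      // 30
            + ", intervalInMillis=" + intervalInMillis);                   // 1800000
    }
}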