diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-testing.txt b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-testing.txt index 65d01a2e174..1e56f5d06d5 100644 --- a/docs/apidiffs/current_vs_latest/opentelemetry-sdk-testing.txt +++ b/docs/apidiffs/current_vs_latest/opentelemetry-sdk-testing.txt @@ -58,6 +58,10 @@ Comparing source compatibility of against +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasBucketBoundaries(double[]) +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasBucketCounts(long[]) +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasCount(long) + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasExemplars(io.opentelemetry.sdk.metrics.data.DoubleExemplarData[]) + +++ NEW METHOD: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasExemplarsSatisfying(java.util.function.Consumer[]) + +++ NEW ANNOTATION: java.lang.SafeVarargs + +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasExemplarsSatisfying(java.lang.Iterable) +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasMax(double) +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasMin(double) +++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasSum(double) diff --git a/exporters/otlp/common/build.gradle.kts b/exporters/otlp/common/build.gradle.kts index eb75d08884f..0ee39d1d86a 100644 --- a/exporters/otlp/common/build.gradle.kts +++ b/exporters/otlp/common/build.gradle.kts @@ -50,7 +50,6 @@ dependencies { testRuntimeOnly("io.grpc:grpc-netty-shaded") jmhImplementation(project(":sdk:testing")) - jmhImplementation(project(":sdk:metrics-testing")) jmhImplementation(project(":sdk-extensions:resources")) jmhImplementation("com.fasterxml.jackson.core:jackson-core") jmhImplementation("io.opentelemetry.proto:opentelemetry-proto") diff --git a/micrometer1-shim/build.gradle.kts b/micrometer1-shim/build.gradle.kts index 753eae5b41a..b7dd688bb06 100644 --- a/micrometer1-shim/build.gradle.kts +++ b/micrometer1-shim/build.gradle.kts @@ -11,7 +11,6 @@ dependencies { api("io.micrometer:micrometer-core") - testImplementation(project(":sdk:metrics-testing")) testImplementation(project(":sdk:testing")) } @@ -25,7 +24,6 @@ testing { } } dependencies { - implementation(project(":sdk:metrics-testing")) implementation(project(":sdk:testing")) implementation(project.dependencies.enforcedPlatform("io.micrometer:micrometer-bom:1.5.17")) @@ -38,7 +36,6 @@ testing { } } dependencies { - implementation(project(":sdk:metrics-testing")) implementation(project(":sdk:testing")) implementation(project.dependencies.enforcedPlatform("io.micrometer:micrometer-bom:1.6.13")) diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/CounterTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/CounterTest.java index 792f71f5189..839b85d2aed 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/CounterTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/CounterTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static 
io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.Counter; @@ -41,15 +41,14 @@ void testCounter() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test counter") .hasUnit("items") - .hasDoubleSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(3) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(3) + .hasAttributes(attributeEntry("tag", "value"))))); Metrics.globalRegistry.remove(counter); counter.increment(); @@ -60,8 +59,7 @@ void testCounter() { metric -> assertThat(metric) .hasName("testCounter") - .hasDoubleSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(3))); + .hasDoubleSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(3)))); } } diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/DistributionSummaryTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/DistributionSummaryTest.java index b1c671efc0e..a7c68e5504e 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/DistributionSummaryTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/DistributionSummaryTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.DistributionSummary; @@ -42,27 +42,25 @@ void testMicrometerDistributionSummary() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test distribution summary") .hasUnit("things") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(7) - .hasCount(3) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(7) + .hasCount(3) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testSummary.max") .hasDescription("This is a test distribution summary") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(4) - .attributes() - .containsEntry("tag", "value"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(4) + .hasAttributes(attributeEntry("tag", "value"))))); Metrics.globalRegistry.remove(summary); @@ -73,15 +71,14 @@ void testMicrometerDistributionSummary() { metric -> assertThat(metric) .hasName("testSummary") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(7) - .hasCount(3) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(7) + .hasCount(3) + .hasAttributes(attributeEntry("tag", "value"))))); } @Test @@ -109,44 +106,55 @@ void testMicrometerHistogram() { 
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test distribution summary") .hasUnit("things") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(555.5) - .hasCount(4) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + points -> + points + .hasSum(555.5) + .hasCount(4) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testSummary.max") .hasDescription("This is a test distribution summary") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(500) - .attributes() - .containsEntry("tag", "value")), + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(500) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testSummary.histogram") - .hasDoubleGauge() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point).hasValue(1).attributes().containsEntry("le", "1"), - point -> - assertThat(point).hasValue(2).attributes().containsEntry("le", "10"), - point -> - assertThat(point).hasValue(3).attributes().containsEntry("le", "100"), - point -> - assertThat(point) - .hasValue(4) - .attributes() - .containsEntry("le", "1000"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes( + attributeEntry("le", "1"), + attributeEntry("tag", "value")), + point -> + point + .hasValue(2) + .hasAttributes( + attributeEntry("le", "10"), + attributeEntry("tag", "value")), + point -> + point + .hasValue(3) + .hasAttributes( + attributeEntry("le", "100"), + attributeEntry("tag", "value")), + point -> + point + .hasValue(4) + .hasAttributes( + attributeEntry("le", "1000"), + attributeEntry("tag", "value"))))); } @Test @@ -171,36 +179,42 @@ void testMicrometerPercentiles() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test distribution summary") .hasUnit("things") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(150) - .hasCount(2) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(150) + .hasCount(2) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testSummary.max") .hasDescription("This is a test distribution summary") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(100) - .attributes() - .containsEntry("tag", "value")), + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(100) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testSummary.percentile") - .hasDoubleGauge() - .points() - .anySatisfy(point -> assertThat(point).attributes().containsEntry("phi", "0.5")) - .anySatisfy( - point -> assertThat(point).attributes().containsEntry("phi", "0.95")) - .anySatisfy( - point -> assertThat(point).attributes().containsEntry("phi", "0.99"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("phi", "0.5"), + attributeEntry("tag", "value")), + point -> + point.hasAttributes( + attributeEntry("phi", "0.95"), + attributeEntry("tag", "value")), + point -> 
+ point.hasAttributes( + attributeEntry("phi", "0.99"), + attributeEntry("tag", "value"))))); } } diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionCounterTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionCounterTest.java index 5d7a86a64d3..9ded1bdc39c 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionCounterTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionCounterTest.java @@ -6,14 +6,15 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.FunctionCounter; import io.micrometer.core.instrument.Metrics; +import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.internal.state.MetricStorageRegistry; import java.util.concurrent.atomic.AtomicLong; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -43,56 +44,59 @@ void testFunctionCounter() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test function counter") .hasUnit("items") - .hasDoubleSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(12) + .hasAttributes(attributeEntry("tag", "value"))))); Metrics.globalRegistry.remove(counter); assertThat(testing.collectAllMetrics()).isEmpty(); } @Test - // TODO(anuraaga): Enable after https://github.com/open-telemetry/opentelemetry-java/pull/4222 - @Disabled - void functionCountersWithSameNameAndDifferentTags() { + @SuppressLogger(MetricStorageRegistry.class) + void functionCountersWithSameNameAndDifferentDescriptions() { FunctionCounter.builder("testFunctionCounterWithTags", num, AtomicLong::get) - .description("First description wins") + .description("First description") .tags("tag", "1") .baseUnit("items") .register(Metrics.globalRegistry); FunctionCounter.builder("testFunctionCounterWithTags", anotherNum, AtomicLong::get) - .description("ignored") + .description("Second description") .tags("tag", "2") .baseUnit("items") .register(Metrics.globalRegistry); assertThat(testing.collectAllMetrics()) - .satisfiesExactly( + .satisfiesExactlyInAnyOrder( + metric -> + assertThat(metric) + .hasName("testFunctionCounterWithTags") + .hasDescription("First description") + .hasUnit("items") + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(12) + .hasAttributes(attributeEntry("tag", "1")))), metric -> assertThat(metric) .hasName("testFunctionCounterWithTags") - .hasDescription("First description wins") + .hasDescription("Second description") .hasUnit("items") - .hasDoubleSum() - .isMonotonic() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(12) - .attributes() - .containsOnly(attributeEntry("tag", "1"))) - .anySatisfy( - point -> - assertThat(point) - .hasValue(13) - 
.attributes() - .containsOnly(attributeEntry("tag", "2")))); + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(13) + .hasAttributes(attributeEntry("tag", "2"))))); } } diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionTimerSecondsTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionTimerSecondsTest.java index ad11c09e505..2e42f8d4621 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionTimerSecondsTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionTimerSecondsTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.FunctionTimer; @@ -61,15 +61,14 @@ void testFunctionTimerWithBaseUnitSeconds() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test function timer") .hasUnit("1") - .hasLongSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(1) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testFunctionTimerSeconds.sum") @@ -77,14 +76,13 @@ void testFunctionTimerWithBaseUnitSeconds() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test function timer") .hasUnit("s") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(42) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(42) + .hasAttributes(attributeEntry("tag", "value"))))); Metrics.globalRegistry.remove(functionTimer); assertThat(testing.collectAllMetrics()).isEmpty(); diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionTimerTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionTimerTest.java index a475d66088e..9446f5feb84 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionTimerTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/FunctionTimerTest.java @@ -6,11 +6,12 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.FunctionTimer; import io.micrometer.core.instrument.Metrics; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.BeforeEach; @@ -55,15 +56,14 @@ void testFunctionTimer() throws InterruptedException 
{ InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test function timer") .hasUnit("1") - .hasLongSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(1) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testFunctionTimer.sum") @@ -71,14 +71,13 @@ void testFunctionTimer() throws InterruptedException { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test function timer") .hasUnit("ms") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(42_000) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(42_000) + .hasAttributes(attributeEntry("tag", "value"))))); Metrics.globalRegistry.remove(functionTimer); assertThat(testing.collectAllMetrics()).isEmpty(); @@ -102,9 +101,10 @@ void testNanoPrecision() { assertThat(metric) .hasName("testNanoFunctionTimer.sum") .hasUnit("ms") - .hasDoubleSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(1.234).attributes())); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> point.hasValue(1.234).hasAttributes(Attributes.empty())))); } @Test @@ -136,19 +136,16 @@ void functionTimersWithSameNameAndDifferentTags() { assertThat(metric) .hasName("testFunctionTimerWithTags.sum") .hasUnit("ms") - .hasDoubleSum() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(12_000) - .attributes() - .containsOnly(attributeEntry("tag", "1"))) - .anySatisfy( - point -> - assertThat(point) - .hasValue(42_000) - .attributes() - .containsOnly(attributeEntry("tag", "2")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(12_000) + .hasAttributes(attributeEntry("tag", "1")), + point -> + point + .hasValue(42_000) + .hasAttributes(attributeEntry("tag", "2"))))); } } diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/GaugeTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/GaugeTest.java index 5944eab24aa..f358bd3aca6 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/GaugeTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/GaugeTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.Gauge; @@ -43,14 +43,13 @@ void testGauge() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test gauge") .hasUnit("items") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(42) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleGaugeSatisfying( + doubleGauge -> + doubleGauge.hasPointsSatisfying( + point -> + point + .hasValue(42) + .hasAttributes(attributeEntry("tag", 
"value"))))); // when Metrics.globalRegistry.remove(gauge); @@ -79,20 +78,13 @@ void gaugesWithSameNameAndDifferentTags() { .hasName("testGaugeWithTags") .hasDescription("First description wins") .hasUnit("items") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(12) - .attributes() - .containsOnly(attributeEntry("tag", "1"))) - .anySatisfy( - point -> - assertThat(point) - .hasValue(42) - .attributes() - .containsOnly(attributeEntry("tag", "2")))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasValue(12).hasAttributes(attributeEntry("tag", "1")), + point -> + point.hasValue(42).hasAttributes(attributeEntry("tag", "2"))))); } @Test @@ -107,9 +99,8 @@ void testWeakRefGauge() throws InterruptedException { metric -> assertThat(metric) .hasName("testWeakRefGauge") - .hasDoubleGauge() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(42))); + .hasDoubleGaugeSatisfying( + gauge -> gauge.hasPointsSatisfying(point -> point.hasValue(42)))); WeakReference numWeakRef = new WeakReference<>(num); num = null; diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerHistogramTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerHistogramTest.java index 27dce3ebcdf..1bf52e31ded 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerHistogramTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerHistogramTest.java @@ -6,14 +6,15 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.LongTaskTimer; import io.micrometer.core.instrument.Metrics; import io.micrometer.core.instrument.MockClock; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import java.time.Duration; -import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -57,11 +58,11 @@ void testMicrometerHistogram() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("tasks") - .hasLongSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> assertThat(point).hasValue(3).attributes().isEmpty()), + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> point.hasValue(3).hasAttributes(Attributes.empty()))), metric -> assertThat(metric) .hasName("testLongTaskTimerHistogram.duration") @@ -69,31 +70,31 @@ void testMicrometerHistogram() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("ms") - .hasDoubleSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> { - assertThat(point).attributes().isEmpty(); - // any value >0 - duration of currently running tasks - Assertions.assertThat(point.getValue()).isPositive(); - }), + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasAttributes(Attributes.empty()) + .satisfies( + pointData -> + 
assertThat(pointData.getValue()) + .isPositive()))), metric -> assertThat(metric) .hasName("testLongTaskTimerHistogram.histogram") .hasInstrumentationScope( InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point).hasValue(2).attributes().containsEntry("le", "100")) - .anySatisfy( - point -> - assertThat(point) - .hasValue(3) - .attributes() - .containsEntry("le", "1000"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasAttributes(attributeEntry("le", "100")).hasValue(2), + point -> + point + .hasAttributes(attributeEntry("le", "1000")) + .hasValue(3)))); sample1.stop(); sample2.stop(); @@ -109,11 +110,11 @@ void testMicrometerHistogram() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("tasks") - .hasLongSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> assertThat(point).hasValue(0).attributes().isEmpty()), + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> point.hasValue(0).hasAttributes(Attributes.empty()))), metric -> assertThat(metric) .hasName("testLongTaskTimerHistogram.duration") @@ -121,28 +122,22 @@ void testMicrometerHistogram() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("ms") - .hasDoubleSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> { - assertThat(point).attributes().isEmpty(); - // any value >0 - duration of currently running tasks - Assertions.assertThat(point.getValue()).isZero(); - }), + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> point.hasValue(0).hasAttributes(Attributes.empty()))), metric -> assertThat(metric) .hasName("testLongTaskTimerHistogram.histogram") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point).hasValue(0).attributes().containsEntry("le", "100")) - .anySatisfy( - point -> - assertThat(point) - .hasValue(0) - .attributes() - .containsEntry("le", "1000"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasValue(0).hasAttributes(attributeEntry("le", "100")), + point -> + point + .hasValue(0) + .hasAttributes(attributeEntry("le", "1000"))))); } } diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerSecondsTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerSecondsTest.java index 01f36a6d160..714bfa03e43 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerSecondsTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerSecondsTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.LongTaskTimer; @@ -51,15 +51,14 @@ void testLongTaskTimerWithBaseUnitSeconds() throws InterruptedException { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test long task timer") .hasUnit("tasks") - .hasLongSum() - 
.isNotMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(1) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testLongTaskTimerSeconds.duration") @@ -67,17 +66,17 @@ void testLongTaskTimerWithBaseUnitSeconds() throws InterruptedException { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test long task timer") .hasUnit("s") - .hasDoubleSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> { - assertThat(point) - .attributes() - .containsOnly(attributeEntry("tag", "value")); - // any value >0 - duration of currently running tasks - assertThat(point.getValue()).isPositive(); - })); + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasAttributes(attributeEntry("tag", "value")) + .satisfies( + pointData -> + assertThat(pointData.getValue()) + .isPositive())))); // when TimeUnit.MILLISECONDS.sleep(100); @@ -89,25 +88,23 @@ void testLongTaskTimerWithBaseUnitSeconds() throws InterruptedException { metric -> assertThat(metric) .hasName("testLongTaskTimerSeconds.active") - .hasLongSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(0) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(0) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testLongTaskTimerSeconds.duration") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(0) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(0) + .hasAttributes(attributeEntry("tag", "value"))))); // when timer is removed from the registry Metrics.globalRegistry.remove(timer); diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerTest.java index 3b3e2c7d50b..3ee22ed80da 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/LongTaskTimerTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.LongTaskTimer; @@ -40,31 +40,30 @@ void testLongTaskTimer() throws InterruptedException { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test long task timer") .hasUnit("tasks") - .hasLongSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(1) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(1) + 
.hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testLongTaskTimer.duration") .hasDescription("This is a test long task timer") .hasUnit("ms") - .hasDoubleSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> { - assertThat(point) - .attributes() - .containsOnly(attributeEntry("tag", "value")); - // any value >0 - duration of currently running tasks - assertThat(point.getValue()).isPositive(); - })); + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasAttributes(attributeEntry("tag", "value")) + .satisfies( + pointData -> + assertThat(pointData.getValue()) + .isPositive())))); // when TimeUnit.MILLISECONDS.sleep(100); @@ -76,25 +75,23 @@ void testLongTaskTimer() throws InterruptedException { metric -> assertThat(metric) .hasName("testLongTaskTimer.active") - .hasLongSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(0) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(0) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testLongTaskTimer.duration") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(0) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(0) + .hasAttributes(attributeEntry("tag", "value"))))); // when timer is removed from the registry Metrics.globalRegistry.remove(timer); diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/MeterTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/MeterTest.java index 39be4d2ebf7..00432c9682b 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/MeterTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/MeterTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.Measurement; @@ -56,109 +56,101 @@ void testMeter() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test meter") .hasUnit("things") - .hasDoubleSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12345) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(12345) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testMeter.total_time") .hasDescription("This is a test meter") .hasUnit("things") - .hasDoubleSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12345) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(12345) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testMeter.count") 
.hasDescription("This is a test meter") .hasUnit("things") - .hasDoubleSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12345) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(12345) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testMeter.active") .hasDescription("This is a test meter") .hasUnit("things") - .hasDoubleSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12345) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(12345) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testMeter.duration") .hasDescription("This is a test meter") .hasUnit("things") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12345) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(12345) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testMeter.max") .hasDescription("This is a test meter") .hasUnit("things") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12345) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(12345) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testMeter.value") .hasDescription("This is a test meter") .hasUnit("things") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12345) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(12345) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testMeter.unknown") .hasDescription("This is a test meter") .hasUnit("things") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12345) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(12345) + .hasAttributes(attributeEntry("tag", "value"))))); // when Metrics.globalRegistry.remove(meter); diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/NamingConventionTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/NamingConventionTest.java index c6505f8a2cd..5a48139fa0f 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/NamingConventionTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/NamingConventionTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static 
io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.Counter; @@ -69,13 +69,12 @@ void renameCounter() { .hasName("test.renamedCounter") .hasInstrumentationScope( InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("test.tag", "test.value"))))); } @Test @@ -88,23 +87,20 @@ void renameDistributionSummary() { metric -> assertThat(metric) .hasName("test.renamedSummary") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point.hasAttributes(attributeEntry("test.tag", "test.value")))), metric -> assertThat(metric) .hasName("test.renamedSummary.max") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value")))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("test.tag", "test.value"))))); } @Test @@ -116,13 +112,12 @@ void renameFunctionCounter() { metric -> assertThat(metric) .hasName("test.renamedFunctionCounter") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("test.tag", "test.value"))))); } @Test @@ -141,23 +136,20 @@ void renameFunctionTimer() { metric -> assertThat(metric) .hasName("test.renamedFunctionTimer.count") - .hasLongSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value"))), + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasAttributes(attributeEntry("test.tag", "test.value")))), metric -> assertThat(metric) .hasName("test.renamedFunctionTimer.sum") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("test.tag", "test.value"))))); } @Test @@ -169,13 +161,12 @@ void renameGauge() { metric -> assertThat(metric) .hasName("test.renamedGauge") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value")))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("test.tag", "test.value"))))); } @Test @@ -188,23 +179,20 @@ void renameLongTaskTimer() { metric -> assertThat(metric) .hasName("test.renamedLongTaskTimer.active") - .hasLongSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value"))), + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasAttributes(attributeEntry("test.tag", "test.value")))), metric -> assertThat(metric) 
.hasName("test.renamedLongTaskTimer.duration") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("test.tag", "test.value"))))); } @Test @@ -217,22 +205,19 @@ void renameTimer() { metric -> assertThat(metric) .hasName("test.renamedTimer") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point.hasAttributes(attributeEntry("test.tag", "test.value")))), metric -> assertThat(metric) .hasName("test.renamedTimer.max") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .attributes() - .containsOnly(attributeEntry("test.tag", "test.value")))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("test.tag", "test.value"))))); } } diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/PrometheusModeTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/PrometheusModeTest.java index 07e88cc58d9..b2765a87832 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/PrometheusModeTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/PrometheusModeTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.Counter; @@ -66,15 +66,14 @@ void testCounter() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test counter") .hasUnit("items") - .hasDoubleSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(12) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(12) + .hasAttributes(attributeEntry("tag", "value"))))); } @Test @@ -101,15 +100,14 @@ void testDistributionSummary() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test summary") .hasUnit("items") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(54) - .hasCount(2) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(54) + .hasCount(2) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testPrometheusSummary.items.max") @@ -117,14 +115,13 @@ void testDistributionSummary() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test summary") .hasUnit("items") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(42) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + 
.hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(42) + .hasAttributes(attributeEntry("tag", "value"))))); } @Test @@ -153,15 +150,14 @@ void testFunctionTimer() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test function timer") .hasUnit("1") - .hasLongSum() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(1) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testPrometheusFunctionTimer.seconds.sum") @@ -169,14 +165,13 @@ void testFunctionTimer() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test function timer") .hasUnit("s") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(42) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(42) + .hasAttributes(attributeEntry("tag", "value"))))); } @Test @@ -198,14 +193,13 @@ void testGauge() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test gauge") .hasUnit("items") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(42) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(42) + .hasAttributes(attributeEntry("tag", "value"))))); } @Test @@ -230,15 +224,14 @@ void testLongTaskTimer() throws InterruptedException { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test long task timer") .hasUnit("tasks") - .hasLongSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(1) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testPrometheusLongTaskTimer.seconds.duration") @@ -246,17 +239,17 @@ void testLongTaskTimer() throws InterruptedException { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test long task timer") .hasUnit("s") - .hasDoubleSum() - .isNotMonotonic() - .points() - .satisfiesExactly( - point -> { - assertThat(point) - .attributes() - .containsOnly(attributeEntry("tag", "value")); - // any value >0 - duration of currently running tasks - assertThat(point.getValue()).isPositive(); - })); + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasAttributes(attributeEntry("tag", "value")) + .satisfies( + pointData -> + assertThat(pointData.getValue()) + .isPositive())))); // when TimeUnit.MILLISECONDS.sleep(100); @@ -268,25 +261,23 @@ void testLongTaskTimer() throws InterruptedException { metric -> assertThat(metric) .hasName("testPrometheusLongTaskTimer.seconds.active") - .hasLongSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(0) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasLongSumSatisfying( + sum -> + 
sum.hasPointsSatisfying( + point -> + point + .hasValue(0) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testPrometheusLongTaskTimer.seconds.duration") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(0) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(0) + .hasAttributes(attributeEntry("tag", "value"))))); } @Test @@ -313,15 +304,14 @@ void testTimer() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("s") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(16.789) - .hasCount(3) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(16.789) + .hasCount(3) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testPrometheusTimer.seconds.max") @@ -329,13 +319,12 @@ void testTimer() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("s") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(10.789) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(10.789) + .hasAttributes(attributeEntry("tag", "value"))))); } } diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/TimerSecondsTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/TimerSecondsTest.java index 6637a41272d..d8bd0a49dd4 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/TimerSecondsTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/TimerSecondsTest.java @@ -6,7 +6,7 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.Metrics; @@ -51,28 +51,26 @@ void testTimerWithBaseUnitSeconds() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("s") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(23.345) - .hasCount(3) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(23.345) + .hasCount(3) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testTimerSeconds.max") .hasDescription("This is a test timer") .hasUnit("s") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(12.345) - .attributes() - .containsEntry("tag", "value"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(12.345) + .hasAttributes(attributeEntry("tag", "value"))))); Metrics.globalRegistry.remove(timer); timer.record(12, TimeUnit.SECONDS); @@ -82,8 
+80,17 @@ void testTimerWithBaseUnitSeconds() { metric -> assertThat(metric) .hasName("testTimerSeconds") - .hasDoubleHistogram() - .points() - .noneSatisfy(point -> assertThat(point).hasSum(35.345).hasCount(4))); + .hasInstrumentationScope( + InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) + .hasDescription("This is a test timer") + .hasUnit("s") + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(23.345) + .hasCount(3) + .hasAttributes(attributeEntry("tag", "value"))))); } } diff --git a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/TimerTest.java b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/TimerTest.java index 14de1437e36..0b49c22ff57 100644 --- a/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/TimerTest.java +++ b/micrometer1-shim/src/test/java/io/opentelemetry/micrometer1shim/TimerTest.java @@ -6,11 +6,12 @@ package io.opentelemetry.micrometer1shim; import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.micrometer.core.instrument.Metrics; import io.micrometer.core.instrument.Timer; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import java.time.Duration; import java.util.concurrent.TimeUnit; @@ -42,27 +43,25 @@ void testTimer() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("ms") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(42_000) - .hasCount(1) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(42_000) + .hasCount(1) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testTimer.max") .hasDescription("This is a test timer") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(42_000) - .attributes() - .containsEntry("tag", "value"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(42_000) + .hasAttributes(attributeEntry("tag", "value"))))); Metrics.globalRegistry.remove(timer); timer.record(12, TimeUnit.SECONDS); @@ -74,15 +73,14 @@ void testTimer() { metric -> assertThat(metric) .hasName("testTimer") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(42_000) - .hasCount(1) - .attributes() - .containsOnly(attributeEntry("tag", "value")))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(42_000) + .hasCount(1) + .hasAttributes(attributeEntry("tag", "value"))))); } @Test @@ -97,17 +95,21 @@ void testNanoPrecision() { assertThat(metric) .hasName("testNanoTimer") .hasUnit("ms") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point).hasSum(1.234).hasCount(1).attributes().isEmpty()), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(1.234) + .hasCount(1) + .hasAttributes(Attributes.empty()))), metric -> assertThat(metric) 
.hasName("testNanoTimer.max") - .hasDoubleGauge() - .points() - .anySatisfy(point -> assertThat(point).hasValue(1.234).attributes().isEmpty())); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> point.hasValue(1.234).hasAttributes(Attributes.empty())))); } @Test @@ -139,47 +141,55 @@ void testMicrometerHistogram() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("ms") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(555500) - .hasCount(4) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(555500) + .hasCount(4) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testTimer.max") .hasDescription("This is a test timer") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(500000) - .attributes() - .containsEntry("tag", "value")), + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(500000) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testTimer.histogram") - .hasDoubleGauge() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point).hasValue(1).attributes().containsEntry("le", "1000"), - point -> - assertThat(point).hasValue(2).attributes().containsEntry("le", "10000"), - point -> - assertThat(point) - .hasValue(3) - .attributes() - .containsEntry("le", "100000"), - point -> - assertThat(point) - .hasValue(4) - .attributes() - .containsEntry("le", "1000000"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes( + attributeEntry("le", "1000"), + attributeEntry("tag", "value")), + point -> + point + .hasValue(2) + .hasAttributes( + attributeEntry("le", "10000"), + attributeEntry("tag", "value")), + point -> + point + .hasValue(3) + .hasAttributes( + attributeEntry("le", "100000"), + attributeEntry("tag", "value")), + point -> + point + .hasValue(4) + .hasAttributes( + attributeEntry("le", "1000000"), + attributeEntry("tag", "value"))))); } @Test @@ -204,36 +214,42 @@ void testMicrometerPercentiles() { InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null)) .hasDescription("This is a test timer") .hasUnit("ms") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(150) - .hasCount(2) - .attributes() - .containsOnly(attributeEntry("tag", "value"))), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(150) + .hasCount(2) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testTimer.max") .hasDescription("This is a test timer") - .hasDoubleGauge() - .points() - .anySatisfy( - point -> - assertThat(point) - .hasValue(100) - .attributes() - .containsEntry("tag", "value")), + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(100) + .hasAttributes(attributeEntry("tag", "value")))), metric -> assertThat(metric) .hasName("testTimer.percentile") - .hasDoubleGauge() - .points() - .anySatisfy(point -> assertThat(point).attributes().containsEntry("phi", "0.5")) - .anySatisfy( - point -> assertThat(point).attributes().containsEntry("phi", "0.95")) - .anySatisfy( - point -> 
assertThat(point).attributes().containsEntry("phi", "0.99"))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("phi", "0.5"), + attributeEntry("tag", "value")), + point -> + point.hasAttributes( + attributeEntry("phi", "0.95"), + attributeEntry("tag", "value")), + point -> + point.hasAttributes( + attributeEntry("phi", "0.99"), + attributeEntry("tag", "value"))))); } } diff --git a/opencensus-shim/build.gradle.kts b/opencensus-shim/build.gradle.kts index cafa80c7a24..7d3b7e908ce 100644 --- a/opencensus-shim/build.gradle.kts +++ b/opencensus-shim/build.gradle.kts @@ -17,7 +17,7 @@ dependencies { api("io.opencensus:opencensus-exporter-metrics-util") testImplementation(project(":sdk:all")) - testImplementation(project(":sdk:metrics-testing")) + testImplementation(project(":sdk:testing")) testImplementation("io.opencensus:opencensus-impl") testImplementation("io.opencensus:opencensus-contrib-exemplar-util") diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java index eebfd80824b..19673e9e312 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/OpenTelemetryMetricExporterTest.java @@ -5,7 +5,8 @@ package io.opentelemetry.opencensusshim; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThat; import com.google.common.collect.ImmutableList; @@ -22,6 +23,7 @@ import io.opencensus.tags.TagValue; import io.opencensus.tags.Tagger; import io.opencensus.tags.Tags; +import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter; import java.util.Comparator; @@ -112,51 +114,55 @@ void testSupportedMetricsExportedCorrectly() throws Exception { .hasName("double_gauge") .hasDescription("double gauge") .hasUnit("ms") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point).hasValue(60).attributes().hasSize(0)), + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(60) + .hasAttributes(Attributes.empty()))), metric -> assertThat(metric) .hasName("double_sum") .hasDescription("double sum") .hasUnit("ms") - .hasDoubleSum() - .points() - .satisfiesExactly( - point -> - assertThat(point).hasValue(60).attributes().hasSize(0)), + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(60) + .hasAttributes(Attributes.empty()))), metric -> assertThat(metric) .hasName("long_gauge") .hasDescription("long gauge") .hasUnit("ms") - .hasLongGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(50) - .attributes() - .hasSize(1) - .containsEntry( - tagKey.getName(), tagValue.asString())), + .hasLongGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(50) + .hasAttributes( + attributeEntry( + tagKey.getName(), + tagValue.asString())))), metric -> assertThat(metric) .hasName("long_sum") .hasDescription("long sum") .hasUnit("ms") - .hasLongSum() 
- .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(50) - .attributes() - .hasSize(1) - .containsEntry( - tagKey.getName(), tagValue.asString())))); + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(50) + .hasAttributes( + attributeEntry( + tagKey.getName(), + tagValue.asString())))))); } finally { otelExporter.stop(); } diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java index 39ed10bcb94..07d7c3b462f 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/internal/metrics/MetricAdapterTest.java @@ -5,7 +5,8 @@ package io.opentelemetry.opencensusshim.internal.metrics; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.opencensus.common.Timestamp; import io.opencensus.metrics.LabelKey; @@ -25,7 +26,6 @@ import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile; import io.opentelemetry.sdk.resources.Resource; import java.util.Arrays; import java.util.Collections; @@ -65,10 +65,11 @@ void convertsLongGauge() { "description", "unit", MetricDescriptor.Type.GAUGE_INT64, - Arrays.asList(LabelKey.create("key1", "desc1"))), + Collections.singletonList(LabelKey.create("key1", "desc1"))), TimeSeries.create( - Arrays.asList(LabelValue.create("value1")), - Arrays.asList(Point.create(Value.longValue(4), Timestamp.fromMillis(2000))), + Collections.singletonList(LabelValue.create("value1")), + Collections.singletonList( + Point.create(Value.longValue(4), Timestamp.fromMillis(2000))), Timestamp.fromMillis(1000))); assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) @@ -77,15 +78,15 @@ void convertsLongGauge() { .hasName("name") .hasDescription("description") .hasUnit("unit") - .hasLongGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(1000000000) - .hasEpochNanos(2000000000) - .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) - .hasValue(4)); + .hasLongGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasValue(4) + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(attributeEntry("key1", "value1")))); } @Test @@ -97,10 +98,11 @@ void convertsDoubleGauge() { "description", "unit", MetricDescriptor.Type.GAUGE_DOUBLE, - Arrays.asList(LabelKey.create("key1", "desc1"))), + Collections.singletonList(LabelKey.create("key1", "desc1"))), TimeSeries.create( - Arrays.asList(LabelValue.create("value1")), - Arrays.asList(Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))), + Collections.singletonList(LabelValue.create("value1")), + Collections.singletonList( + Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))), Timestamp.fromMillis(1000))); assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) @@ -109,15 +111,15 @@ void convertsDoubleGauge() { .hasName("name") .hasDescription("description") .hasUnit("unit") - 
.hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(1000000000) - .hasEpochNanos(2000000000) - .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) - .hasValue(4)); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(attributeEntry("key1", "value1")) + .hasValue(4))); } @Test @@ -129,10 +131,11 @@ void convertsLongSum() { "description", "unit", MetricDescriptor.Type.CUMULATIVE_INT64, - Arrays.asList(LabelKey.create("key1", "desc1"))), + Collections.singletonList(LabelKey.create("key1", "desc1"))), TimeSeries.create( - Arrays.asList(LabelValue.create("value1")), - Arrays.asList(Point.create(Value.longValue(4), Timestamp.fromMillis(2000))), + Collections.singletonList(LabelValue.create("value1")), + Collections.singletonList( + Point.create(Value.longValue(4), Timestamp.fromMillis(2000))), Timestamp.fromMillis(1000))); assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) @@ -141,17 +144,17 @@ void convertsLongSum() { .hasName("name") .hasDescription("description") .hasUnit("unit") - .hasLongSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(1000000000) - .hasEpochNanos(2000000000) - .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) - .hasValue(4)); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(attributeEntry("key1", "value1")) + .hasValue(4))); } @Test @@ -163,10 +166,11 @@ void convertsDoubleSum() { "description", "unit", MetricDescriptor.Type.CUMULATIVE_DOUBLE, - Arrays.asList(LabelKey.create("key1", "desc1"))), + Collections.singletonList(LabelKey.create("key1", "desc1"))), TimeSeries.create( - Arrays.asList(LabelValue.create("value1")), - Arrays.asList(Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))), + Collections.singletonList(LabelValue.create("value1")), + Collections.singletonList( + Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))), Timestamp.fromMillis(1000))); assertThat(MetricAdapter.convert(RESOURCE, censusMetric)) @@ -175,17 +179,17 @@ void convertsDoubleSum() { .hasName("name") .hasDescription("description") .hasUnit("unit") - .hasDoubleSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(1000000000) - .hasEpochNanos(2000000000) - .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) - .hasValue(4)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(attributeEntry("key1", "value1")) + .hasValue(4))); } @Test @@ -203,10 +207,10 @@ void convertHistogram() { "description", "unit", MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION, - Arrays.asList(LabelKey.create("key1", "desc1"))), + Collections.singletonList(LabelKey.create("key1", "desc1"))), TimeSeries.create( - Arrays.asList(LabelValue.create("value1")), - Arrays.asList( + Collections.singletonList(LabelValue.create("value1")), + Collections.singletonList( Point.create( Value.distributionValue( Distribution.create( @@ -233,30 +237,31 @@ void convertHistogram() { .hasName("name") .hasDescription("description") .hasUnit("unit") - 
.hasDoubleHistogram() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(1000000000) - .hasEpochNanos(2000000000) - .hasSum(5) - .hasCount(10) - .hasBucketBoundaries(2.0, 5.0) - .hasBucketCounts(2, 6, 2) - .hasExemplars( - ImmutableDoubleExemplarData.create( - Attributes.empty(), 2000000, SpanContext.getInvalid(), 1.0), - ImmutableDoubleExemplarData.create( - Attributes.empty(), - 1000000, - SpanContext.create( - "00000000000000000000000000000001", - "0000000000000002", - TraceFlags.getDefault(), - TraceState.getDefault()), - 4.0))); + .hasHistogramSatisfying( + histogram -> + histogram + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasSum(5) + .hasCount(10) + .hasBucketBoundaries(2.0, 5.0) + .hasBucketCounts(2, 6, 2) + .hasExemplars( + ImmutableDoubleExemplarData.create( + Attributes.empty(), 2000000, SpanContext.getInvalid(), 1.0), + ImmutableDoubleExemplarData.create( + Attributes.empty(), + 1000000, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 4.0)))); } @Test @@ -268,10 +273,10 @@ void convertSummary() { "description", "unit", MetricDescriptor.Type.SUMMARY, - Arrays.asList(LabelKey.create("key1", "desc1"))), + Collections.singletonList(LabelKey.create("key1", "desc1"))), TimeSeries.create( - Arrays.asList(LabelValue.create("value1")), - Arrays.asList( + Collections.singletonList(LabelValue.create("value1")), + Collections.singletonList( Point.create( Value.summaryValue( Summary.create( @@ -280,7 +285,7 @@ void convertSummary() { Summary.Snapshot.create( 10L, 5d, - Arrays.asList( + Collections.singletonList( Summary.Snapshot.ValueAtPercentile.create(100.0, 200))))), Timestamp.fromMillis(2000))), Timestamp.fromMillis(1000))); @@ -291,17 +296,17 @@ void convertSummary() { .hasName("name") .hasDescription("description") .hasUnit("unit") - .hasDoubleSummary() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(1000000000) - .hasEpochNanos(2000000000) - .hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1")) - .hasCount(10) - .hasSum(5) - .hasValues(ImmutableValueAtQuantile.create(1.0, 200))); + .hasSummarySatisfying( + summary -> + summary.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(1000000000) + .hasEpochNanos(2000000000) + .hasAttributes(attributeEntry("key1", "value1")) + .hasCount(10) + .hasSum(5) + .hasValuesSatisfying(value -> value.hasValue(200.0).hasQuantile(1.0)))); } @Test @@ -319,10 +324,10 @@ void convertGaugeHistogram() { "description", "unit", MetricDescriptor.Type.GAUGE_DISTRIBUTION, - Arrays.asList(LabelKey.create("key1", "desc1"))), + Collections.singletonList(LabelKey.create("key1", "desc1"))), TimeSeries.create( - Arrays.asList(LabelValue.create("value1")), - Arrays.asList( + Collections.singletonList(LabelValue.create("value1")), + Collections.singletonList( Point.create( Value.distributionValue( Distribution.create( @@ -348,29 +353,30 @@ void convertGaugeHistogram() { .hasName("name") .hasDescription("description") .hasUnit("unit") - .hasDoubleHistogram() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(2000000000) - .hasEpochNanos(2000000000) - .hasSum(5) - .hasCount(10) - .hasBucketBoundaries(2.0, 5.0) - .hasBucketCounts(2, 6, 2) - .hasExemplars( - ImmutableDoubleExemplarData.create( - Attributes.empty(), 2000000, 
SpanContext.getInvalid(), 1.0), - ImmutableDoubleExemplarData.create( - Attributes.empty(), - 1000000, - SpanContext.create( - "00000000000000000000000000000001", - "0000000000000002", - TraceFlags.getDefault(), - TraceState.getDefault()), - 4.0))); + .hasHistogramSatisfying( + histogram -> + histogram + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(2000000000) + .hasEpochNanos(2000000000) + .hasSum(5) + .hasCount(10) + .hasBucketBoundaries(2.0, 5.0) + .hasBucketCounts(2, 6, 2) + .hasExemplars( + ImmutableDoubleExemplarData.create( + Attributes.empty(), 2000000, SpanContext.getInvalid(), 1.0), + ImmutableDoubleExemplarData.create( + Attributes.empty(), + 1000000, + SpanContext.create( + "00000000000000000000000000000001", + "0000000000000002", + TraceFlags.getDefault(), + TraceState.getDefault()), + 4.0)))); } } diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java index f7026baa6da..1e9f756ca47 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricProducerTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.opencensusshim.metrics; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opencensus.contrib.exemplar.util.ExemplarUtils; import io.opencensus.stats.Aggregation; @@ -76,24 +76,28 @@ void extractHistogram() throws InterruptedException { .hasName("task_latency_distribution") .hasDescription("The distribution of the task latencies.") .hasUnit("ms") - .hasDoubleHistogram() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(50) - .hasCount(1) - .hasBucketCounts(1, 0, 0, 0, 0, 0, 0) - .hasBucketBoundaries( - 100d, 200d, 400d, 1000d, 2000d, 4000d) - .exemplars() - .satisfiesExactly( - exemplar -> - assertThat(exemplar) - .hasFilteredAttributes(Attributes.empty()) - .hasValue(50) - .hasTraceId(TRACE_ID.toLowerBase16()) - .hasSpanId(SPAN_ID.toLowerBase16()))))); + .hasHistogramSatisfying( + histogram -> + histogram + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasSum(50) + .hasCount(1) + .hasBucketCounts(1, 0, 0, 0, 0, 0, 0) + .hasBucketBoundaries( + 100d, 200d, 400d, 1000d, 2000d, 4000d) + .hasExemplarsSatisfying( + exemplar -> + exemplar + .hasFilteredAttributes( + Attributes.empty()) + .hasValue(50) + .hasTraceId( + TRACE_ID.toLowerBase16()) + .hasSpanId( + SPAN_ID + .toLowerBase16())))))); } } diff --git a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java index 98066c7fa6a..661054ecc1b 100644 --- a/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java +++ b/opencensus-shim/src/test/java/io/opentelemetry/opencensusshim/metrics/OpenCensusMetricsTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.opencensusshim.metrics; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opencensus.stats.Aggregation; import io.opencensus.stats.Measure; @@ -48,7 +48,9 @@ 
void capturesOpenCensusAndOtelMetrics() throws InterruptedException { () -> assertThat(reader.collectAllMetrics()) .satisfiesExactly( - metric -> assertThat(metric).hasName("otel.sum").hasLongSum(), - metric -> assertThat(metric).hasName("oc.sum").hasLongSum())); + metric -> + assertThat(metric).hasName("otel.sum").hasLongSumSatisfying(sum -> {}), + metric -> + assertThat(metric).hasName("oc.sum").hasLongSumSatisfying(sum -> {}))); } } diff --git a/perf-harness/build.gradle.kts b/perf-harness/build.gradle.kts index e3ab1dce379..ea6520c5f4e 100644 --- a/perf-harness/build.gradle.kts +++ b/perf-harness/build.gradle.kts @@ -9,7 +9,6 @@ dependencies { implementation(project(":api:all")) implementation(project(":sdk:all")) implementation(project(":sdk:testing")) - implementation(project(":sdk:metrics-testing")) implementation(project(":exporters:otlp:trace")) implementation(project(":exporters:logging")) implementation(project(":semconv")) diff --git a/sdk-extensions/metric-incubator/build.gradle.kts b/sdk-extensions/metric-incubator/build.gradle.kts index e1eb309d4a1..2c9ef7fe96c 100644 --- a/sdk-extensions/metric-incubator/build.gradle.kts +++ b/sdk-extensions/metric-incubator/build.gradle.kts @@ -16,7 +16,6 @@ dependencies { testImplementation(project(":sdk:testing")) testImplementation(project(":sdk-extensions:autoconfigure")) - testImplementation(project(":sdk:metrics-testing")) testImplementation("com.google.guava:guava") } diff --git a/sdk-extensions/metric-incubator/src/test/java/io/opentelemetry/sdk/viewconfig/ViewConfigCustomizerTest.java b/sdk-extensions/metric-incubator/src/test/java/io/opentelemetry/sdk/viewconfig/ViewConfigCustomizerTest.java index fd9b35caef2..49e5472f564 100644 --- a/sdk-extensions/metric-incubator/src/test/java/io/opentelemetry/sdk/viewconfig/ViewConfigCustomizerTest.java +++ b/sdk-extensions/metric-incubator/src/test/java/io/opentelemetry/sdk/viewconfig/ViewConfigCustomizerTest.java @@ -5,7 +5,8 @@ package io.opentelemetry.sdk.viewconfig; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; @@ -61,20 +62,17 @@ void customizeMeterProvider_Spi() { assertThat(reader.collectAllMetrics()) .satisfiesExactly( - metricData -> { - assertThat(metricData) - .hasLongSum() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasValue(1) - .hasAttributes( - Attributes.builder() - .put("foo", "val") - .put("bar", "val") - .build())); - }); + metricData -> + assertThat(metricData) + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point + .hasValue(1) + .hasAttributes( + attributeEntry("foo", "val"), + attributeEntry("bar", "val"))))); } @Test diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/DoublePointDataAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/DoublePointDataAssert.java deleted file mode 100644 index f84952f9c03..00000000000 --- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/DoublePointDataAssert.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package 
io.opentelemetry.sdk.testing.assertj;
-
-import io.opentelemetry.sdk.metrics.data.DoublePointData;
-import org.assertj.core.api.Assertions;
-
-/** Test assertions for {@link DoublePointData}. */
-public class DoublePointDataAssert
-    extends AbstractPointDataAssert<DoublePointDataAssert, DoublePointData> {
-
-  protected DoublePointDataAssert(DoublePointData actual) {
-    super(actual, DoublePointDataAssert.class);
-  }
-
-  /** Ensures the {@code as_double} field matches the expected value. */
-  public DoublePointDataAssert hasValue(double expected) {
-    isNotNull();
-    Assertions.assertThat(actual.getValue()).as("value").isEqualTo(expected);
-    return this;
-  }
-}
diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/GaugeAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/GaugeAssert.java
deleted file mode 100644
index 884d6d312a0..00000000000
--- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/GaugeAssert.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.sdk.testing.assertj;
-
-import io.opentelemetry.sdk.metrics.data.GaugeData;
-import io.opentelemetry.sdk.metrics.data.PointData;
-import org.assertj.core.api.AbstractAssert;
-import org.assertj.core.api.AbstractIterableAssert;
-import org.assertj.core.api.Assertions;
-
-/** Test assertions for {@link GaugeData}. */
-public class GaugeAssert<T extends PointData> extends AbstractAssert<GaugeAssert<T>, GaugeData<T>> {
-  protected GaugeAssert(GaugeData<T> actual) {
-    super(actual, GaugeAssert.class);
-  }
-
-  public AbstractIterableAssert<?, ? extends Iterable<? extends T>, T, ?> points() {
-    isNotNull();
-    return Assertions.assertThat(actual.getPoints());
-  }
-}
diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramAssert.java
deleted file mode 100644
index afb58fd404e..00000000000
--- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramAssert.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.sdk.testing.assertj;
-
-import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
-import io.opentelemetry.sdk.metrics.data.HistogramData;
-import io.opentelemetry.sdk.metrics.data.HistogramPointData;
-import org.assertj.core.api.AbstractAssert;
-import org.assertj.core.api.AbstractIterableAssert;
-import org.assertj.core.api.Assertions;
-
-/** Test assertions for {@link HistogramData}. */
-public class HistogramAssert extends AbstractAssert<HistogramAssert, HistogramData> {
-
-  protected HistogramAssert(HistogramData actual) {
-    super(actual, HistogramAssert.class);
-  }
-
-  /** Ensures that {@code aggregation_temporality} field is {@code CUMULATIVE}. */
-  public HistogramAssert isCumulative() {
-    isNotNull();
-    if (actual.getAggregationTemporality() != AggregationTemporality.CUMULATIVE) {
-      failWithActualExpectedAndMessage(
-          actual,
-          "aggregationTemporality: CUMULATIVE",
-          "Expected Histogram to have cumulative aggregation but found <%s>",
-          AggregationTemporality.CUMULATIVE,
-          actual.getAggregationTemporality());
-    }
-    return this;
-  }
-
-  /** Ensures that {@code aggregation_temporality} field is {@code DELTA}.
*/ - public HistogramAssert isDelta() { - isNotNull(); - if (actual.getAggregationTemporality() != AggregationTemporality.DELTA) { - failWithActualExpectedAndMessage( - actual, - "aggregationTemporality: DELTA", - "Expected Histgram to have cumulative aggregation but found <%s>", - AggregationTemporality.DELTA, - actual.getAggregationTemporality()); - } - return this; - } - - /** Returns convenience API to assert against the {@code points} field. */ - public AbstractIterableAssert< - ?, ? extends Iterable, HistogramPointData, ?> - points() { - isNotNull(); - return Assertions.assertThat(actual.getPoints()); - } -} diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramPointDataAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramPointDataAssert.java deleted file mode 100644 index b069f2a0e4c..00000000000 --- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramPointDataAssert.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.testing.assertj; - -import io.opentelemetry.sdk.metrics.data.HistogramPointData; -import java.util.Arrays; -import org.assertj.core.api.Assertions; - -/** Test assertions for {@link HistogramPointData}. */ -public class HistogramPointDataAssert - extends AbstractPointDataAssert { - - protected HistogramPointDataAssert(HistogramPointData actual) { - super(actual, HistogramPointDataAssert.class); - } - - /** Ensures the {@code sum} field matches the expected value. */ - public HistogramPointDataAssert hasSum(double expected) { - isNotNull(); - Assertions.assertThat(actual.getSum()).as("sum").isEqualTo(expected); - return this; - } - - /** Ensures the {@code sum} field contains a greater value than the passed {@code boundary}. */ - public HistogramPointDataAssert hasSumGreaterThan(double boundary) { - isNotNull(); - Assertions.assertThat(actual.getSum()).as("sum").isGreaterThan(boundary); - return this; - } - - /** Ensures the {@code min} field matches the expected value. */ - public HistogramPointDataAssert hasMin(double expected) { - isNotNull(); - Assertions.assertThat(actual.hasMin()).isTrue(); - Assertions.assertThat(actual.getMin()).as("min").isEqualTo(expected); - return this; - } - - /** Ensures the {@code max} field matches the expected value. */ - public HistogramPointDataAssert hasMax(double expected) { - isNotNull(); - Assertions.assertThat(actual.hasMax()).isTrue(); - Assertions.assertThat(actual.getMax()).as("max").isEqualTo(expected); - return this; - } - - /** Ensures the {@code count} field matches the expected value. */ - public HistogramPointDataAssert hasCount(long expected) { - isNotNull(); - Assertions.assertThat(actual.getCount()).as("count").isEqualTo(expected); - return this; - } - - /** - * Ensures the {@code boundaries} field matches the expected value. - * - * @param boundaries The set of bucket boundaries in the same order as the expected collection. - */ - public HistogramPointDataAssert hasBucketBoundaries(double... boundaries) { - isNotNull(); - Double[] bigBoundaries = Arrays.stream(boundaries).boxed().toArray(Double[]::new); - Assertions.assertThat(actual.getBoundaries()).as("boundaries").containsExactly(bigBoundaries); - return this; - } - - /** - * Ensures the {@code counts} field matches the expected value. - * - * @param counts The set of bucket counts in the same order as the expected collection. 
- */ - public HistogramPointDataAssert hasBucketCounts(long... counts) { - isNotNull(); - Long[] bigCounts = Arrays.stream(counts).boxed().toArray(Long[]::new); - Assertions.assertThat(actual.getCounts()).as("bucketCounts").containsExactly(bigCounts); - return this; - } -} diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LongPointDataAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LongPointDataAssert.java deleted file mode 100644 index d2ad5f7c749..00000000000 --- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/LongPointDataAssert.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.testing.assertj; - -import io.opentelemetry.sdk.metrics.data.LongPointData; -import org.assertj.core.api.Assertions; - -/** Test assertions for {@link LongPointData}. */ -public class LongPointDataAssert - extends AbstractPointDataAssert { - - protected LongPointDataAssert(LongPointData actual) { - super(actual, LongPointDataAssert.class); - } - - /** Ensures the {@code as_int} field matches the expected value. */ - public LongPointDataAssert hasValue(long expected) { - isNotNull(); - Assertions.assertThat(actual.getValue()).as("value").isEqualTo(expected); - return this; - } -} diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/MetricAssertions.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/MetricAssertions.java index 967bf656f38..b096f8ef640 100644 --- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/MetricAssertions.java +++ b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/MetricAssertions.java @@ -5,17 +5,8 @@ package io.opentelemetry.sdk.testing.assertj; -import io.opentelemetry.sdk.metrics.data.DoublePointData; import io.opentelemetry.sdk.metrics.data.ExemplarData; -import io.opentelemetry.sdk.metrics.data.GaugeData; -import io.opentelemetry.sdk.metrics.data.HistogramData; -import io.opentelemetry.sdk.metrics.data.HistogramPointData; -import io.opentelemetry.sdk.metrics.data.LongPointData; import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.data.PointData; -import io.opentelemetry.sdk.metrics.data.SumData; -import io.opentelemetry.sdk.metrics.data.SummaryData; -import io.opentelemetry.sdk.metrics.data.SummaryPointData; import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramBuckets; import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramPointData; import org.assertj.core.api.Assertions; @@ -27,34 +18,6 @@ public static MetricDataAssert assertThat(MetricData metric) { return new MetricDataAssert(metric); } - /** Returns an assertion for {@link GaugeData}. */ - // There is no real use case for passing in a GaugeData that is a lambda, if for some reason it is - // desired a cast will still work. - @SuppressWarnings("FunctionalInterfaceClash") - public static GaugeAssert assertThat(GaugeData metric) { - return new GaugeAssert<>(metric); - } - - /** Returns an assertion for {@link HistogramData}. */ - public static HistogramAssert assertThat(HistogramData metric) { - return new HistogramAssert(metric); - } - - /** Returns an assertion for {@link SummaryData}. 
*/ - public static SummaryDataAssert assertThat(SummaryData metric) { - return new SummaryDataAssert(metric); - } - - /** Returns an assertion for {@link HistogramPointData}. */ - public static HistogramPointDataAssert assertThat(HistogramPointData point) { - return new HistogramPointDataAssert(point); - } - - /** Returns an assertion for {@link SummaryPointData}. */ - public static SummaryPointDataAssert assertThat(SummaryPointData point) { - return new SummaryPointDataAssert(point); - } - /** Returns an assertion for {@link ExponentialHistogramPointData}. */ public static ExponentialHistogramPointDataAssert assertThat( ExponentialHistogramPointData point) { @@ -66,21 +29,6 @@ public static ExponentialHistogramBucketsAssert assertThat(ExponentialHistogramB return new ExponentialHistogramBucketsAssert(buckets); } - /** Returns an assertion for {@link DoublePointData}. */ - public static DoublePointDataAssert assertThat(DoublePointData point) { - return new DoublePointDataAssert(point); - } - - /** Returns an assertion for {@link SumData}. */ - public static SumDataAssert assertThat(SumData point) { - return new SumDataAssert<>(point); - } - - /** Returns an assertion for {@link LongPointData}. */ - public static LongPointDataAssert assertThat(LongPointData point) { - return new LongPointDataAssert(point); - } - public static ExemplarDataAssert assertThat(ExemplarData exemplar) { return new ExemplarDataAssert(exemplar); } diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/MetricDataAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/MetricDataAssert.java index a46cfab75ea..d4e57b4c5a9 100644 --- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/MetricDataAssert.java +++ b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/MetricDataAssert.java @@ -6,8 +6,6 @@ package io.opentelemetry.sdk.testing.assertj; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.metrics.data.DoublePointData; -import io.opentelemetry.sdk.metrics.data.LongPointData; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricDataType; import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramData; @@ -94,24 +92,6 @@ public MetricDataAssert hasUnit(String unit) { return this; } - /** - * Ensures this {@link MetricData} is a {@code DoubleHistogram}. - * - * @return convenience API to assert against the {@code DoubleHistogram}. - */ - public HistogramAssert hasDoubleHistogram() { - isNotNull(); - if (actual.getType() != MetricDataType.HISTOGRAM) { - failWithActualExpectedAndMessage( - actual, - "type: HISTOGRAM", - "Expected MetricData to have type <%s> but found <%s>", - MetricDataType.HISTOGRAM, - actual.getType()); - } - return new HistogramAssert(actual.getHistogramData()); - } - /** * Ensures this {@link MetricData} is a {@code ExponentialHistogram}. * @@ -129,94 +109,4 @@ public ExponentialHistogramAssert hasExponentialHistogram() { } return new ExponentialHistogramAssert(ExponentialHistogramData.fromMetricData(actual)); } - - /** - * Ensures this {@link MetricData} is a {@code DoubleGauge}. - * - * @return convenience API to assert against the {@code DoubleGauge}. 
- */ - public GaugeAssert hasDoubleGauge() { - isNotNull(); - if (actual.getType() != MetricDataType.DOUBLE_GAUGE) { - failWithActualExpectedAndMessage( - actual, - "type: DOUBLE_GAUGE", - "Expected MetricData to have type <%s> but found <%s>", - MetricDataType.DOUBLE_GAUGE, - actual.getType()); - } - return new GaugeAssert<>(actual.getDoubleGaugeData()); - } - - /** - * Ensures this {@link MetricData} is a {@code DoubleSum}. - * - * @return convenience API to assert against the {@code DoubleSum}. - */ - public SumDataAssert hasDoubleSum() { - isNotNull(); - if (actual.getType() != MetricDataType.DOUBLE_SUM) { - failWithActualExpectedAndMessage( - actual, - "type: DOUBLE_SUM", - "Expected MetricData to have type <%s> but found <%s>", - MetricDataType.DOUBLE_SUM, - actual.getType()); - } - return new SumDataAssert<>(actual.getDoubleSumData()); - } - - /** - * Ensures this {@link MetricData} is a {@code LongGauge}. - * - * @return convenience API to assert against the {@code LongGauge}. - */ - public GaugeAssert hasLongGauge() { - isNotNull(); - if (actual.getType() != MetricDataType.LONG_GAUGE) { - failWithActualExpectedAndMessage( - actual, - "type: LONG_GAUGE", - "Expected MetricData to have type <%s> but found <%s>", - MetricDataType.LONG_GAUGE, - actual.getType()); - } - return new GaugeAssert<>(actual.getLongGaugeData()); - } - - /** - * Ensures this {@link MetricData} is a {@code LongSum}. - * - * @return convenience API to assert against the {@code LongSum}. - */ - public SumDataAssert hasLongSum() { - isNotNull(); - if (actual.getType() != MetricDataType.LONG_SUM) { - failWithActualExpectedAndMessage( - actual, - "type: LONG_SUM", - "Expected MetricData to have type <%s> but found <%s>", - MetricDataType.LONG_SUM, - actual.getType()); - } - return new SumDataAssert<>(actual.getLongSumData()); - } - - /** - * Ensures this {@link MetricData} is a {@code DoubleSummaryData}. - * - * @return convenience API to assert against the {@code DoubleSummaryData}. - */ - public SummaryDataAssert hasDoubleSummary() { - isNotNull(); - if (actual.getType() != MetricDataType.SUMMARY) { - failWithActualExpectedAndMessage( - actual, - "type: SUMMARY", - "Expected MetricData to have type <%s> but found <%s>", - MetricDataType.SUMMARY, - actual.getType()); - } - return new SummaryDataAssert(actual.getSummaryData()); - } } diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SumDataAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SumDataAssert.java deleted file mode 100644 index 0d0b2100ca4..00000000000 --- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SumDataAssert.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.testing.assertj; - -import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.data.PointData; -import io.opentelemetry.sdk.metrics.data.SumData; -import org.assertj.core.api.AbstractAssert; -import org.assertj.core.api.AbstractIterableAssert; -import org.assertj.core.api.Assertions; - -/** Test assertions for {@link SumData}. */ -public class SumDataAssert - extends AbstractAssert, SumData> { - protected SumDataAssert(SumData actual) { - super(actual, SumDataAssert.class); - } - - /** Ensures that {@code is_monotonic} field is true. 
*/
-  public SumDataAssert<T> isMonotonic() {
-    isNotNull();
-    if (!actual.isMonotonic()) {
-      failWithActualExpectedAndMessage(
-          actual, "monotonic: true", "Expected Sum to be monotonic", true, actual.isMonotonic());
-    }
-    return myself;
-  }
-
-  /** Ensures that {@code is_monotonic} field is false. */
-  public SumDataAssert<T> isNotMonotonic() {
-    isNotNull();
-    if (actual.isMonotonic()) {
-      failWithActualExpectedAndMessage(
-          actual,
-          "monotonic: fail",
-          "Expected Sum to be non-monotonic, found: %s",
-          actual.isMonotonic());
-    }
-    return myself;
-  }
-
-  /** Ensures that {@code aggregation_temporality} field is {@code CUMULATIVE}. */
-  public SumDataAssert<T> isCumulative() {
-    isNotNull();
-    if (actual.getAggregationTemporality() != AggregationTemporality.CUMULATIVE) {
-      failWithActualExpectedAndMessage(
-          actual,
-          "aggregationTemporality: CUMULATIVE",
-          "Expected Sum to have cumulative aggregation but found <%s>",
-          actual.getAggregationTemporality());
-    }
-    return myself;
-  }
-
-  /** Ensures that {@code aggregation_temporality} field is {@code DELTA}. */
-  public SumDataAssert<T> isDelta() {
-    isNotNull();
-    if (actual.getAggregationTemporality() != AggregationTemporality.DELTA) {
-      failWithActualExpectedAndMessage(
-          actual,
-          "aggregationTemporality: DELTA",
-          "Expected Sum to have delta aggregation but found <%s>",
-          actual.getAggregationTemporality());
-    }
-    return myself;
-  }
-
-  /** Returns convenience API to assert against the {@code points} field. */
-  public AbstractIterableAssert<?, ? extends Iterable<? extends T>, T, ?> points() {
-    isNotNull();
-    return Assertions.assertThat(actual.getPoints());
-  }
-}
diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SummaryDataAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SummaryDataAssert.java
deleted file mode 100644
index d8c08da03a4..00000000000
--- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SummaryDataAssert.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright The OpenTelemetry Authors
- * SPDX-License-Identifier: Apache-2.0
- */
-
-package io.opentelemetry.sdk.testing.assertj;
-
-import io.opentelemetry.sdk.metrics.data.SummaryData;
-import io.opentelemetry.sdk.metrics.data.SummaryPointData;
-import org.assertj.core.api.AbstractAssert;
-import org.assertj.core.api.AbstractIterableAssert;
-import org.assertj.core.api.Assertions;
-
-/** Assert on a {@link SummaryData} metric. */
-public class SummaryDataAssert extends AbstractAssert<SummaryDataAssert, SummaryData> {
-
-  protected SummaryDataAssert(SummaryData actual) {
-    super(actual, SummaryDataAssert.class);
-  }
-
-  /** Returns convenience API to assert against the {@code points} field. */
-  public AbstractIterableAssert<
-      ?, ?
extends Iterable, SummaryPointData, ?> - points() { - isNotNull(); - return Assertions.assertThat(actual.getPoints()); - } -} diff --git a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SummaryPointDataAssert.java b/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SummaryPointDataAssert.java deleted file mode 100644 index 2054d1ccbec..00000000000 --- a/sdk/metrics-testing/src/main/java/io/opentelemetry/sdk/testing/assertj/SummaryPointDataAssert.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * SPDX-License-Identifier: Apache-2.0 - */ - -package io.opentelemetry.sdk.testing.assertj; - -import io.opentelemetry.sdk.metrics.data.SummaryPointData; -import io.opentelemetry.sdk.metrics.data.ValueAtQuantile; -import org.assertj.core.api.Assertions; - -/** Asserts for (deprecated) Summary points. */ -public class SummaryPointDataAssert - extends AbstractPointDataAssert { - protected SummaryPointDataAssert(SummaryPointData actual) { - super(actual, SummaryPointDataAssert.class); - } - - /** Ensure the summary has seen the expected count of measurements. */ - public SummaryPointDataAssert hasCount(long expected) { - isNotNull(); - Assertions.assertThat(actual.getCount()).as("count").isEqualTo(expected); - return this; - } - - /** Ensure the summary has the expected sum across all observed measurements. */ - public SummaryPointDataAssert hasSum(double expected) { - isNotNull(); - Assertions.assertThat(actual.getSum()).as("sum").isEqualTo(expected); - return this; - } - - /** Ensure the summary has exactly, in any order, the given percentile values. */ - public SummaryPointDataAssert hasValues(ValueAtQuantile... values) { - isNotNull(); - Assertions.assertThat(actual.getValues()).containsExactlyInAnyOrder(values); - return this; - } -} diff --git a/sdk/metrics-testing/src/test/java/io/opentelemetry/sdk/testing/assertj/MetricAssertionsTest.java b/sdk/metrics-testing/src/test/java/io/opentelemetry/sdk/testing/assertj/MetricAssertionsTest.java index cb84b64d931..2cf03935f71 100644 --- a/sdk/metrics-testing/src/test/java/io/opentelemetry/sdk/testing/assertj/MetricAssertionsTest.java +++ b/sdk/metrics-testing/src/test/java/io/opentelemetry/sdk/testing/assertj/MetricAssertionsTest.java @@ -10,34 +10,13 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.trace.SpanContext; -import io.opentelemetry.api.trace.TraceFlags; -import io.opentelemetry.api.trace.TraceState; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; -import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; -import io.opentelemetry.sdk.metrics.data.DoublePointData; -import io.opentelemetry.sdk.metrics.data.HistogramPointData; -import io.opentelemetry.sdk.metrics.data.LongExemplarData; -import io.opentelemetry.sdk.metrics.data.LongPointData; import io.opentelemetry.sdk.metrics.data.MetricData; -import io.opentelemetry.sdk.metrics.data.SummaryPointData; -import io.opentelemetry.sdk.metrics.data.ValueAtQuantile; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData; -import 
io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile; import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramData; import io.opentelemetry.sdk.resources.Resource; -import java.util.Arrays; import java.util.Collections; import org.junit.jupiter.api.Test; @@ -46,29 +25,6 @@ public class MetricAssertionsTest { Resource.create(Attributes.of(stringKey("resource_key"), "resource_value")); private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO = InstrumentationScopeInfo.create("instrumentation_library"); - private static final MetricData HISTOGRAM_METRIC = - ImmutableMetricData.createDoubleHistogram( - RESOURCE, - INSTRUMENTATION_SCOPE_INFO, - /* name= */ "histogram", - /* description= */ "description", - /* unit= */ "unit", - ImmutableHistogramData.create( - AggregationTemporality.CUMULATIVE, - // Points - Collections.emptyList())); - - private static final MetricData HISTOGRAM_DELTA_METRIC = - ImmutableMetricData.createDoubleHistogram( - RESOURCE, - INSTRUMENTATION_SCOPE_INFO, - /* name= */ "histogram_delta", - /* description= */ "description", - /* unit= */ "unit", - ImmutableHistogramData.create( - AggregationTemporality.DELTA, - // Points - Collections.emptyList())); private static final MetricData EXPONENTIAL_HISTOGRAM_METRIC = ImmutableMetricData.createExponentialHistogram( @@ -94,17 +50,6 @@ public class MetricAssertionsTest { // Points Collections.emptyList())); - private static final MetricData DOUBLE_SUMMARY_METRIC = - ImmutableMetricData.createDoubleSummary( - RESOURCE, - INSTRUMENTATION_SCOPE_INFO, - /* name= */ "summary", - /* description= */ "description", - /* unit= */ "unit", - ImmutableSummaryData.create( - // Points - Collections.emptyList())); - private static final MetricData DOUBLE_GAUGE_METRIC = ImmutableMetricData.createDoubleGauge( RESOURCE, @@ -116,128 +61,12 @@ public class MetricAssertionsTest { // Points Collections.emptyList())); - private static final MetricData DOUBLE_SUM_METRIC = - ImmutableMetricData.createDoubleSum( - RESOURCE, - INSTRUMENTATION_SCOPE_INFO, - /* name= */ "sum", - /* description= */ "description", - /* unit= */ "unit", - ImmutableSumData.create( - true, - AggregationTemporality.CUMULATIVE, - // Points - Collections.emptyList())); - - private static final MetricData DOUBLE_DELTA_SUM_METRIC = - ImmutableMetricData.createDoubleSum( - RESOURCE, - INSTRUMENTATION_SCOPE_INFO, - /* name= */ "sum_delta", - /* description= */ "description", - /* unit= */ "unit", - ImmutableSumData.create( - false, - AggregationTemporality.DELTA, - // Points - Collections.emptyList())); - - private static final DoubleExemplarData DOUBLE_EXEMPLAR = - ImmutableDoubleExemplarData.create( - Attributes.empty(), - 0, - SpanContext.create( - "00000000000000000000000000000001", - "0000000000000002", - TraceFlags.getDefault(), - TraceState.getDefault()), - 1.0); - - private static final DoublePointData DOUBLE_POINT_DATA = - ImmutableDoublePointData.create(1, 2, Attributes.empty(), 3.0, Collections.emptyList()); - - private static final DoublePointData 
DOUBLE_POINT_DATA_WITH_EXEMPLAR = - ImmutableDoublePointData.create( - 1, 2, Attributes.empty(), 3.0, Collections.singletonList(DOUBLE_EXEMPLAR)); - - private static final MetricData LONG_GAUGE_METRIC = - ImmutableMetricData.createLongGauge( - RESOURCE, - INSTRUMENTATION_SCOPE_INFO, - /* name= */ "gauge", - /* description= */ "description", - /* unit= */ "unit", - ImmutableGaugeData.create( - // Points - Collections.emptyList())); - - private static final MetricData LONG_SUM_METRIC = - ImmutableMetricData.createLongSum( - RESOURCE, - INSTRUMENTATION_SCOPE_INFO, - /* name= */ "sum", - /* description= */ "description", - /* unit= */ "unit", - ImmutableSumData.create( - true, - AggregationTemporality.CUMULATIVE, - // Points - Collections.emptyList())); - - private static final MetricData LONG_DELTA_SUM_METRIC = - ImmutableMetricData.createLongSum( - RESOURCE, - INSTRUMENTATION_SCOPE_INFO, - /* name= */ "sum_delta", - /* description= */ "description", - /* unit= */ "unit", - ImmutableSumData.create( - false, - AggregationTemporality.DELTA, - // Points - Collections.emptyList())); - - private static final LongExemplarData LONG_EXEMPLAR = - ImmutableLongExemplarData.create( - Attributes.empty(), - 0, - SpanContext.create( - "00000000000000000000000000000001", - "0000000000000002", - TraceFlags.getDefault(), - TraceState.getDefault()), - 1); - - private static final LongPointData LONG_POINT_DATA = - ImmutableLongPointData.create(1, 2, Attributes.empty(), 3, Collections.emptyList()); - - private static final LongPointData LONG_POINT_DATA_WITH_EXEMPLAR = - ImmutableLongPointData.create( - 1, 2, Attributes.empty(), 3, Collections.singletonList(LONG_EXEMPLAR)); - - private static final ValueAtQuantile PERCENTILE_VALUE = ImmutableValueAtQuantile.create(0, 1); - - private static final SummaryPointData DOUBLE_SUMMARY_POINT_DATA = - ImmutableSummaryPointData.create( - 1, 2, Attributes.empty(), 1, 2, Collections.singletonList(PERCENTILE_VALUE)); - - private static final HistogramPointData DOUBLE_HISTOGRAM_POINT_DATA = - ImmutableHistogramPointData.create( - 1, - 2, - Attributes.empty(), - 15, - 4.0, - 7.0, - Collections.singletonList(10.0), - Arrays.asList(1L, 2L)); - @Test void metric_passing() { - assertThat(HISTOGRAM_METRIC) + assertThat(EXPONENTIAL_HISTOGRAM_METRIC) .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) - .hasName("histogram") + .hasName("exponential_histogram") .hasDescription("description") .hasUnit("unit"); } @@ -246,37 +75,21 @@ void metric_passing() { void metric_fails() { assertThatThrownBy( () -> - assertThat(HISTOGRAM_METRIC) + assertThat(EXPONENTIAL_HISTOGRAM_METRIC) .hasResource( Resource.create(Attributes.of(stringKey("monkey_key"), "resource_value")))) .isInstanceOf(AssertionError.class); assertThatThrownBy( () -> - assertThat(HISTOGRAM_METRIC) + assertThat(EXPONENTIAL_HISTOGRAM_METRIC) .hasInstrumentationScope( InstrumentationScopeInfo.create("instrumentation_library_for_monkeys"))) .isInstanceOf(AssertionError.class); - assertThatThrownBy(() -> assertThat(HISTOGRAM_METRIC).hasName("Monkeys")) - .isInstanceOf(AssertionError.class); - assertThatThrownBy(() -> assertThat(HISTOGRAM_METRIC).hasDescription("Monkeys")) + assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasName("Monkeys")) .isInstanceOf(AssertionError.class); - assertThatThrownBy(() -> assertThat(HISTOGRAM_METRIC).hasUnit("Monkeys")) + assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasDescription("Monkeys")) .isInstanceOf(AssertionError.class); - } - - 
@Test - void histogram_passing() { - assertThat(HISTOGRAM_METRIC).hasDoubleHistogram().isCumulative(); - assertThat(HISTOGRAM_DELTA_METRIC).hasDoubleHistogram().isDelta(); - } - - @Test - void histogram_fails() { - assertThatThrownBy(() -> assertThat(DOUBLE_GAUGE_METRIC).hasDoubleHistogram()) - .isInstanceOf(AssertionError.class); - assertThatThrownBy(() -> assertThat(HISTOGRAM_METRIC).hasDoubleHistogram().isDelta()) - .isInstanceOf(AssertionError.class); - assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasDoubleHistogram().isCumulative()) + assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasUnit("Monkeys")) .isInstanceOf(AssertionError.class); } @@ -300,230 +113,4 @@ void exponential_histogram_fails() { .isCumulative()) .isInstanceOf(AssertionError.class); } - - @Test - void summary_passing() { - assertThat(DOUBLE_SUMMARY_METRIC).hasDoubleSummary(); - } - - @Test - void summary_failing() { - assertThatThrownBy(() -> assertThat(DOUBLE_GAUGE_METRIC).hasDoubleSummary()) - .isInstanceOf(AssertionError.class); - } - - @Test - void doubleGauge_passing() { - assertThat(DOUBLE_GAUGE_METRIC).hasDoubleGauge(); - } - - @Test - void doubleGauge_fails() { - assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasDoubleGauge()) - .isInstanceOf(AssertionError.class); - } - - @Test - void doubleSum_passing() { - assertThat(DOUBLE_SUM_METRIC).hasDoubleSum().isCumulative().isMonotonic(); - assertThat(DOUBLE_DELTA_SUM_METRIC).hasDoubleSum().isDelta().isNotMonotonic(); - } - - @Test - void doubleSum_fails() { - assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasDoubleSum()) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_SUM_METRIC).hasDoubleSum().isDelta()) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_SUM_METRIC).hasDoubleSum().isNotMonotonic()) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_DELTA_SUM_METRIC).hasDoubleSum().isCumulative()) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_DELTA_SUM_METRIC).hasDoubleSum().isMonotonic()) - .isInstanceOf(AssertionError.class); - } - - @Test - void doublePoint_passing() { - assertThat(DOUBLE_POINT_DATA) - .hasStartEpochNanos(1) - .hasEpochNanos(2) - .hasValue(3) - .hasAttributes(Attributes.empty()) - .exemplars() - .isEmpty(); - - assertThat(DOUBLE_POINT_DATA_WITH_EXEMPLAR).hasExemplars(DOUBLE_EXEMPLAR); - } - - @Test - void doublePoint_failing() { - assertThatThrownBy(() -> assertThat(DOUBLE_POINT_DATA).hasStartEpochNanos(2)) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_POINT_DATA).hasEpochNanos(3)) - .isInstanceOf(AssertionError.class); - assertThatThrownBy(() -> assertThat(DOUBLE_POINT_DATA).hasValue(4)) - .isInstanceOf(AssertionError.class); - assertThatThrownBy( - () -> - assertThat(DOUBLE_POINT_DATA) - .hasAttributes(Attributes.builder().put("x", "y").build())) - .isInstanceOf(AssertionError.class); - assertThatThrownBy( - () -> - assertThat(DOUBLE_POINT_DATA) - .hasExemplars( - ImmutableDoubleExemplarData.create( - Attributes.empty(), - 0, - SpanContext.create( - "00000000000000000000000000000001", - "0000000000000002", - TraceFlags.getDefault(), - TraceState.getDefault()), - 1.0))) - .isInstanceOf(AssertionError.class); - } - - @Test - void longPoint_passing() { - assertThat(LONG_POINT_DATA) - .hasStartEpochNanos(1) - .hasEpochNanos(2) - .hasValue(3) - .hasAttributes(Attributes.empty()) - .exemplars() - 
.isEmpty(); - - assertThat(LONG_POINT_DATA_WITH_EXEMPLAR).hasExemplars(LONG_EXEMPLAR); - } - - @Test - void longPoint_failing() { - assertThatThrownBy(() -> assertThat(LONG_POINT_DATA).hasStartEpochNanos(2)) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(LONG_POINT_DATA).hasEpochNanos(3)) - .isInstanceOf(AssertionError.class); - assertThatThrownBy(() -> assertThat(LONG_POINT_DATA).hasValue(4)) - .isInstanceOf(AssertionError.class); - assertThatThrownBy( - () -> - assertThat(LONG_POINT_DATA) - .hasAttributes(Attributes.builder().put("x", "y").build())) - .isInstanceOf(AssertionError.class); - assertThatThrownBy( - () -> - assertThat(LONG_POINT_DATA) - .hasExemplars( - ImmutableLongExemplarData.create( - Attributes.empty(), - 0, - SpanContext.create( - "00000000000000000000000000000001", - "0000000000000002", - TraceFlags.getDefault(), - TraceState.getDefault()), - 1))) - .isInstanceOf(AssertionError.class); - } - - @Test - void longSum_passing() { - assertThat(LONG_SUM_METRIC).hasLongSum().isCumulative().isMonotonic(); - assertThat(LONG_DELTA_SUM_METRIC).hasLongSum().isDelta().isNotMonotonic(); - } - - @Test - void longSum_fails() { - assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasLongSum()) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(LONG_SUM_METRIC).hasLongSum().isDelta()) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(LONG_SUM_METRIC).hasLongSum().isNotMonotonic()) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(LONG_DELTA_SUM_METRIC).hasLongSum().isCumulative()) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(LONG_DELTA_SUM_METRIC).hasLongSum().isMonotonic()) - .isInstanceOf(AssertionError.class); - } - - @Test - void longGauge_passing() { - assertThat(LONG_GAUGE_METRIC).hasLongGauge(); - } - - @Test - void longGauge_fails() { - assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasLongGauge()) - .isInstanceOf(AssertionError.class); - } - - @Test - void doubleSummaryPointData_passing() { - assertThat(DOUBLE_SUMMARY_POINT_DATA) - .hasCount(1) - .hasSum(2) - .hasEpochNanos(2) - .hasStartEpochNanos(1) - .hasAttributes(Attributes.empty()) - .hasValues(PERCENTILE_VALUE); - } - - @Test - void doubleSummaryPointData_failing() { - assertThatThrownBy(() -> assertThat(DOUBLE_SUMMARY_POINT_DATA).hasCount(2)) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_SUMMARY_POINT_DATA).hasSum(1)) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy( - () -> - assertThat(DOUBLE_SUMMARY_POINT_DATA) - .hasValues(ImmutableValueAtQuantile.create(1, 1))) - .isInstanceOf(AssertionError.class); - } - - @Test - void doubleHistogramPointData_passing() { - assertThat(DOUBLE_HISTOGRAM_POINT_DATA) - .hasCount(3) - .hasSum(15) - .hasMin(4.0) - .hasMax(7.0) - .hasSumGreaterThan(10) - .hasEpochNanos(2) - .hasStartEpochNanos(1) - .hasAttributes(Attributes.empty()) - .hasBucketBoundaries(10) - .hasBucketCounts(1, 2); - } - - @Test - void doubleHistogramPointData_failing() { - assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasCount(2)) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasSum(1)) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasSumGreaterThan(20)) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> 
assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasBucketBoundaries(1, 2, 3)) - .isInstanceOf(AssertionError.class); - - assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasBucketCounts(1, 2, 3)) - .isInstanceOf(AssertionError.class); - } } diff --git a/sdk/metrics/build.gradle.kts b/sdk/metrics/build.gradle.kts index 8893a21fadc..a9ef2556a76 100644 --- a/sdk/metrics/build.gradle.kts +++ b/sdk/metrics/build.gradle.kts @@ -27,7 +27,7 @@ dependencies { testImplementation("com.google.guava:guava") jmh(project(":sdk:trace")) - jmh(project(":sdk:metrics-testing")) + jmh(project(":sdk:testing")) } testing { diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/CardinalityTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/CardinalityTest.java index 125f2064c4d..2f2451355c8 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/CardinalityTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/CardinalityTest.java @@ -5,13 +5,15 @@ package io.opentelemetry.sdk.metrics; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.ObservableLongMeasurement; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; +import io.opentelemetry.sdk.metrics.data.LongPointData; +import io.opentelemetry.sdk.metrics.data.SumData; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import java.time.Duration; import java.util.concurrent.atomic.AtomicLong; @@ -62,29 +64,28 @@ void staleMetricsDropped_synchronousInstrument() { // DELTA reader only has latest assertThat(deltaReader.collectAllMetrics()) .as("Delta collection " + i) - .hasSize(1) .satisfiesExactly( metricData -> assertThat(metricData) .hasName("sync-counter") - .hasLongSum() - .isDelta() - .points() - .hasSize(1)); + .hasLongSumSatisfying(sum -> sum.isDelta().hasPointsSatisfying(point -> {}))); // Make sure we preserve previous cumulatives int currentSize = i; assertThat(cumulativeReader.collectAllMetrics()) .as("Cumulative collection " + i) - .hasSize(1) .satisfiesExactly( metricData -> assertThat(metricData) .hasName("sync-counter") - .hasLongSum() - .isCumulative() - .points() - .hasSize(currentSize)); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(currentSize)))); } // Now punch the limit and ONLY metrics we just recorded stay, due to simplistic GC. 
for (int i = 2001; i <= 2010; i++) { @@ -92,27 +93,33 @@ void staleMetricsDropped_synchronousInstrument() { } assertThat(deltaReader.collectAllMetrics()) .as("Delta collection - post limit @ 10") - .hasSize(1) .satisfiesExactly( metricData -> assertThat(metricData) .hasName("sync-counter") - .hasLongSum() - .isDelta() - .points() - .hasSize(10)); + .hasLongSumSatisfying( + sum -> + sum.isDelta() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(10)))); assertThat(cumulativeReader.collectAllMetrics()) .as("Cumulative collection - post limit @ 10") - .hasSize(1) .satisfiesExactly( metricData -> assertThat(metricData) .hasName("sync-counter") - .hasLongSum() - .isCumulative() - .points() - .hasSize(10)); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(10)))); } /** @@ -134,27 +141,20 @@ void staleMetricsDropped_asynchronousInstrument() { for (int i = 1; i <= 5; i++) { assertThat(deltaReader.collectAllMetrics()) .as("Delta collection " + i) - .hasSize(1) .satisfiesExactlyInAnyOrder( metricData -> assertThat(metricData) .hasName("async-counter") - .hasLongSum() - .isDelta() - .points() - .hasSize(1)); + .hasLongSumSatisfying(sum -> sum.isDelta().hasPointsSatisfying(point -> {}))); assertThat(cumulativeReader.collectAllMetrics()) .as("Cumulative collection " + i) - .hasSize(1) .satisfiesExactlyInAnyOrder( metricData -> assertThat(metricData) .hasName("async-counter") - .hasLongSum() - .isCumulative() - .points() - .hasSize(1)); + .hasLongSumSatisfying( + sum -> sum.isCumulative().hasPointsSatisfying(point -> {}))); } } @@ -173,41 +173,55 @@ void cardinalityLimits_synchronousInstrument() { assertThat(deltaReader.collectAllMetrics()) .as("Delta collection") - .hasSize(2) .satisfiesExactlyInAnyOrder( metricData -> assertThat(metricData) .hasName("sync-counter1") - .hasLongSum() - .isDelta() - .points() - .hasSize(MAX_ACCUMULATIONS), + .hasLongSumSatisfying( + sum -> + sum.isDelta() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(MAX_ACCUMULATIONS))), metricData -> assertThat(metricData) .hasName("sync-counter2") - .hasLongSum() - .isDelta() - .points() - .hasSize(MAX_ACCUMULATIONS)); + .hasLongSumSatisfying( + sum -> + sum.isDelta() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(MAX_ACCUMULATIONS)))); assertThat(cumulativeReader.collectAllMetrics()) .as("Cumulative collection") - .hasSize(2) .satisfiesExactlyInAnyOrder( metricData -> assertThat(metricData) .hasName("sync-counter1") - .hasLongSum() - .isCumulative() - .points() - .hasSize(MAX_ACCUMULATIONS), + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(MAX_ACCUMULATIONS))), metricData -> assertThat(metricData) .hasName("sync-counter2") - .hasLongSum() - .isCumulative() - .points() - .hasSize(MAX_ACCUMULATIONS)); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(MAX_ACCUMULATIONS)))); } /** @@ -227,40 +241,54 @@ void cardinalityLimits_asynchronousInstrument() { assertThat(deltaReader.collectAllMetrics()) .as("Delta collection") - .hasSize(2) .satisfiesExactlyInAnyOrder( metricData -> assertThat(metricData) .hasName("async-counter1") - .hasLongSum() - 
.isDelta() - .points() - .hasSize(MAX_ACCUMULATIONS), + .hasLongSumSatisfying( + sum -> + sum.isDelta() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(MAX_ACCUMULATIONS))), metricData -> assertThat(metricData) .hasName("async-counter2") - .hasLongSum() - .isDelta() - .points() - .hasSize(MAX_ACCUMULATIONS)); + .hasLongSumSatisfying( + sum -> + sum.isDelta() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(MAX_ACCUMULATIONS)))); assertThat(cumulativeReader.collectAllMetrics()) .as("Cumulative collection") - .hasSize(2) .satisfiesExactlyInAnyOrder( metricData -> assertThat(metricData) .hasName("async-counter1") - .hasLongSum() - .isCumulative() - .points() - .hasSize(MAX_ACCUMULATIONS), + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(MAX_ACCUMULATIONS))), metricData -> assertThat(metricData) .hasName("async-counter2") - .hasLongSum() - .isCumulative() - .points() - .hasSize(MAX_ACCUMULATIONS)); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .satisfies( + (Consumer<SumData<LongPointData>>) + sumPointData -> + assertThat(sumPointData.getPoints().size()) + .isEqualTo(MAX_ACCUMULATIONS)))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/IdentityTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/IdentityTest.java index 35c3ec967a9..ef804752743 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/IdentityTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/IdentityTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.github.netmikey.logunit.api.LogCapturer; import io.opentelemetry.internal.testing.slf4j.SuppressLogger; @@ -55,9 +55,8 @@ void sameMeterSameInstrumentNoViews() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(20))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20)))); meterProvider.get("meter2").counterBuilder("counter2").ofDoubles().build().add(10); meterProvider.get("meter2").counterBuilder("counter2").ofDoubles().build().add(10); @@ -68,9 +67,8 @@ void sameMeterSameInstrumentNoViews() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter2")) .hasName("counter2") - .hasDoubleSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(20))); + .hasDoubleSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20)))); meterProvider .get("meter3") @@ -92,9 +90,8 @@ void sameMeterSameInstrumentNoViews() { .hasInstrumentationScope(forMeter("meter3")) .hasName("counter3") .hasDescription("description3") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(20))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20)))); meterProvider.get("meter4").counterBuilder("counter4").setUnit("unit4").build().add(10); meterProvider.get("meter4").counterBuilder("counter4").setUnit("unit4").build().add(10); @@ -106,9 +103,8 @@ void sameMeterSameInstrumentNoViews() { .hasInstrumentationScope(forMeter("meter4")) .hasName("counter4") .hasUnit("unit4") - .hasLongSum() - 
.points() - .satisfiesExactly(point -> assertThat(point).hasValue(20))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); } @@ -126,16 +122,14 @@ void sameMeterDifferentInstrumentNoViews() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter2") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); } @@ -154,16 +148,14 @@ void differentMeterSameInstrumentNoViews() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter2")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); meterProvider.get("meter1").counterBuilder("counter1").build().add(10); meterProvider @@ -180,17 +172,15 @@ void differentMeterSameInstrumentNoViews() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope( InstrumentationScopeInfo.create("meter1", "version1", null)) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); meterProvider .meterBuilder("meter1") @@ -215,17 +205,15 @@ void differentMeterSameInstrumentNoViews() { .hasInstrumentationScope( InstrumentationScopeInfo.create("meter1", "version1", null)) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope( InstrumentationScopeInfo.create("meter1", "version1", "schema1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); } @@ -251,16 +239,14 @@ void sameMeterConflictingInstrumentDescriptionNoViews() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - 
.satisfiesExactly(point -> assertThat(point).hasValue(10)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))); assertThat(metricData.getDescription()).isBlank(); }); @@ -287,17 +273,15 @@ void sameMeterConflictingInstrumentUnitNoViews() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasUnit("unit1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasUnit("") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()) .allSatisfy( @@ -321,18 +305,15 @@ void sameMeterConflictingInstrumentTypeNoViews() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .isNotMonotonic() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic().hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .isMonotonic() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.isMonotonic().hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()) .allSatisfy( @@ -356,16 +337,14 @@ void sameMeterConflictingInstrumentValueTypeNoViews() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasDoubleSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasDoubleSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()) .allSatisfy( @@ -393,9 +372,8 @@ void sameMeterSameInstrumentSingleView() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(20))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); } @@ -419,17 +397,15 @@ void differentMeterSameInstrumentSingleView() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter2")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); } @@ -453,17 +429,15 @@ void 
sameMeterDifferentInstrumentSingleView() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter2") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); } @@ -487,16 +461,14 @@ void sameMeterDifferentInstrumentViewSelectingInstrumentName() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter2") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))); assertThat(metricData.getDescription()).isBlank(); }); @@ -522,16 +494,14 @@ void sameMeterDifferentInstrumentViewSelectingInstrumentType() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter2") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))); assertThat(metricData.getDescription()).isBlank(); }); @@ -557,16 +527,14 @@ void differentMeterSameInstrumentViewSelectingMeterName() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> { assertThat(metricData) .hasInstrumentationScope(forMeter("meter2")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))); assertThat(metricData.getDescription()).isBlank(); }); @@ -605,17 +573,15 @@ void differentMeterSameInstrumentViewSelectingMeterVersion() { InstrumentationScopeInfo.create("meter1", "version1", null)) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> { assertThat(metricData) .hasInstrumentationScope( InstrumentationScopeInfo.create("meter1", "version2", null)) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))); 
assertThat(metricData.getDescription()).isBlank(); }); @@ -654,17 +620,15 @@ void differentMeterSameInstrumentViewSelectingMeterSchema() { InstrumentationScopeInfo.create("meter1", null, "schema1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> { assertThat(metricData) .hasInstrumentationScope( InstrumentationScopeInfo.create("meter1", null, "schema2")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))); assertThat(metricData.getDescription()).isBlank(); }); @@ -693,25 +657,22 @@ void differentMeterDifferentInstrumentViewSelectingInstrumentNameAndMeterName() .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter2") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))); assertThat(metricData.getDescription()).isBlank(); }, metricData -> { assertThat(metricData) .hasInstrumentationScope(forMeter("meter2")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))); assertThat(metricData.getDescription()).isBlank(); }); @@ -743,17 +704,15 @@ void sameMeterSameInstrumentConflictingViewDescriptions() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(20)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description2") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(20))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20)))); assertThat(metricStorageRegistryLogs.getEvents()) .allSatisfy( @@ -786,16 +745,14 @@ void sameMeterSameInstrumentConflictingViewAggregations() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(20)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasDoubleHistogram() - .points() - .satisfiesExactly(point -> assertThat(point).hasSum(20))); + .hasHistogramSatisfying( + histogram -> histogram.hasPointsSatisfying(point -> point.hasSum(20)))); assertThat(metricStorageRegistryLogs.getEvents()) .allSatisfy( @@ -826,16 +783,14 @@ void sameMeterDifferentInstrumentConflictingViewName() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter-new") - .hasLongSum() - .points() - .satisfiesExactly(point -> 
assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter-new") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()) .allSatisfy( @@ -864,16 +819,14 @@ void differentMeterDifferentInstrumentViewSetsName() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter-new") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter2")) .hasName("counter-new") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); } @@ -907,16 +860,14 @@ void sameMeterDifferentInstrumentCompatibleViews() { .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") .hasDescription("description1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(20)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1-histogram") - .hasDoubleHistogram() - .points() - .satisfiesExactly(point -> assertThat(point).hasSum(20))); + .hasHistogramSatisfying( + histogram -> histogram.hasPointsSatisfying(point -> point.hasSum(20)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); assertThat(viewRegistryLogs.getEvents()).hasSize(0); @@ -948,16 +899,14 @@ void sameMeterSameAsyncInstrumentCompatibleViews() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(1)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(1))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter2") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(1))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(1)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); } @@ -987,16 +936,14 @@ void sameMeterDifferentInstrumentIncompatibleViewAggregation() { assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter1") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(1)), + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(1))), metricData -> assertThat(metricData) .hasInstrumentationScope(forMeter("meter1")) .hasName("counter2") - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(10))); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)))); assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0); assertThat(viewRegistryLogs.getEvents()) diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleCounterTest.java 
index cb6db077282..43bf889d236 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleCounterTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.github.netmikey.logunit.api.LogCapturer; @@ -16,7 +17,6 @@ import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater; -import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.instrument.BoundDoubleCounter; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; @@ -95,17 +95,17 @@ void collectMetrics_WithEmptyAttributes() { .hasName("testCounter") .hasDescription("description") .hasUnit("ms") - .hasDoubleSum() - .isMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(24))); + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(24)))); } @Test @@ -132,24 +132,23 @@ void collectMetrics_WithMultipleCollects() { .hasName("testCounter") .hasDescription("") .hasUnit("") - .hasDoubleSum() - .isMonotonic() - .isCumulative() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point).hasAttributes(Attributes.empty()).hasValue(33.5), - point -> - assertThat(point) - .hasValue(555.9) - .attributes() - .hasSize(1) - .containsEntry("K", "V"))); + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(33.5), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasValue(555.9) + .hasAttributes(attributeEntry("K", "V"))))); // Repeat to prove we keep previous values. 
testClock.advance(Duration.ofNanos(SECOND_NANOS)); @@ -159,21 +158,23 @@ void collectMetrics_WithMultipleCollects() { .satisfiesExactly( metric -> assertThat(metric) - .hasDoubleSum() - .isCumulative() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point).hasAttributes(Attributes.empty()).hasValue(44.5), - point -> - assertThat(point) - .hasAttributes(Attributes.of(stringKey("K"), "V")) - .hasValue(777.9))); + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(44.5), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasValue(777.9) + .hasAttributes(attributeEntry("K", "V"))))); } finally { bound.unbind(); } @@ -234,19 +235,17 @@ void stressTest() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testCounter") - .hasDoubleSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasValue(80_000) - .attributes() - .hasSize(1) - .containsEntry("K", "V"))); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(80_000) + .hasAttributes(attributeEntry("K", "V"))))); } @Test @@ -281,22 +280,35 @@ void stressTest_WithDifferentLabelSet() { assertThat(metric) .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) - .hasDoubleSum() - .isCumulative() - .isMonotonic() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasValue(40_000)) - .extracting(PointData::getAttributes) - .containsExactlyInAnyOrder( - Attributes.of(stringKey(keys[0]), values[0]), - Attributes.of(stringKey(keys[1]), values[1]), - Attributes.of(stringKey(keys[2]), values[2]), - Attributes.of(stringKey(keys[3]), values[3]))); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(40_000) + .hasAttributes(attributeEntry(keys[0], values[0])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(40_000) + .hasAttributes(attributeEntry(keys[1], values[1])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(40_000) + .hasAttributes(attributeEntry(keys[2], values[2])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(40_000) + .hasAttributes(attributeEntry(keys[3], values[3]))))); } private static class OperationUpdaterWithBinding extends OperationUpdater { diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleGaugeBuilderTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleGaugeBuilderTest.java index 7a9735a0f21..ec98c2c0f64 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleGaugeBuilderTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleGaugeBuilderTest.java @@ -6,7 +6,8 @@ 
package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.Meter; @@ -41,7 +42,11 @@ void removeCallback() { assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( - metric -> assertThat(metric).hasName("testGauge").hasDoubleGauge().points().hasSize(1)); + metric -> + assertThat(metric) + .hasName("testGauge") + .hasDoubleGaugeSatisfying( + doubleGauge -> doubleGauge.hasPointsSatisfying(poit -> {}))); gauge.close(); @@ -76,15 +81,15 @@ void collectMetrics_WithOneRecord() { .hasName("testObserver") .hasDescription("My own DoubleValueObserver") .hasUnit("ms") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 1000000000L) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.builder().put("k", "v").build()) - .hasValue(12.1d))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 1000000000L) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("k", "v")) + .hasValue(12.1d)))); testClock.advance(Duration.ofSeconds(1)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -95,14 +100,14 @@ void collectMetrics_WithOneRecord() { .hasName("testObserver") .hasDescription("My own DoubleValueObserver") .hasUnit("ms") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 2000000000L) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.builder().put("k", "v").build()) - .hasValue(12.1d))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 2000000000L) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("k", "v")) + .hasValue(12.1d)))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogramTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogramTest.java index 5db1146badb..1938767602c 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogramTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleHistogramTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.github.netmikey.logunit.api.LogCapturer; @@ -16,10 +17,10 @@ import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater; -import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.instrument.BoundDoubleHistogram; import io.opentelemetry.sdk.metrics.internal.view.ExponentialHistogramAggregation; import 
io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; import java.time.Duration; @@ -97,21 +98,23 @@ void collectMetrics_WithEmptyAttributes() { .hasName("testHistogram") .hasDescription("description") .hasUnit("ms") - .hasDoubleHistogram() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasCount(2) - .hasSum(24) - .hasBucketBoundaries( - 5, 10, 25, 50, 75, 100, 250, 500, 750, 1_000, 2_500, 5_000, - 7_500, 10_000) - .hasBucketCounts(0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))); + .hasHistogramSatisfying( + histogram -> + histogram + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasCount(2) + .hasSum(24) + .hasBucketBoundaries( + 5, 10, 25, 50, 75, 100, 250, 500, 750, 1_000, 2_500, + 5_000, 7_500, 10_000) + .hasBucketCounts( + 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)))); } @Test @@ -136,26 +139,27 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") - .hasDoubleHistogram() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasCount(3) - .hasSum(566.3d) - .hasBucketCounts(0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0) - .hasAttributes(Attributes.builder().put("K", "V").build()), - point -> - assertThat(point) - .hasCount(2) - .hasSum(22.2d) - .hasBucketCounts(0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - .hasAttributes(Attributes.empty()))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasCount(3) + .hasSum(566.3d) + .hasBucketCounts( + 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry("K", "V")), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasCount(2) + .hasSum(22.2d) + .hasBucketCounts( + 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(Attributes.empty())))); // Histograms are cumulative by default. 
testClock.advance(Duration.ofNanos(SECOND_NANOS)); @@ -168,26 +172,27 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") - .hasDoubleHistogram() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasCount(4) - .hasSum(788.3) - .hasBucketCounts(0, 0, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0) - .hasAttributes(Attributes.builder().put("K", "V").build()), - point -> - assertThat(point) - .hasCount(3) - .hasSum(39.2) - .hasBucketCounts(0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - .hasAttributes(Attributes.empty()))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasCount(4) + .hasSum(788.3) + .hasBucketCounts( + 0, 0, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry("K", "V")), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasCount(3) + .hasSum(39.2) + .hasBucketCounts( + 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(Attributes.empty())))); } finally { bound.unbind(); } @@ -220,7 +225,7 @@ void collectMetrics_ExponentialHistogramAggregation() { assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( metric -> - assertThat(metric) + MetricAssertions.assertThat(metric) .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") @@ -231,7 +236,7 @@ void collectMetrics_ExponentialHistogramAggregation() { .points() .satisfiesExactlyInAnyOrder( point -> { - assertThat(point) + MetricAssertions.assertThat(point) .hasStartEpochNanos(testClock.now() - SECOND_NANOS) .hasEpochNanos(testClock.now()) .hasAttributes(Attributes.empty()) @@ -239,15 +244,15 @@ void collectMetrics_ExponentialHistogramAggregation() { .hasSum(25) .hasScale(-1) .hasZeroCount(0); - assertThat(point.getPositiveBuckets()) + MetricAssertions.assertThat(point.getPositiveBuckets()) .hasOffset(1) .hasCounts(Collections.singletonList(2L)); - assertThat(point.getNegativeBuckets()) + MetricAssertions.assertThat(point.getNegativeBuckets()) .hasOffset(0) .hasCounts(Collections.emptyList()); }, point -> { - assertThat(point) + MetricAssertions.assertThat(point) .hasStartEpochNanos(testClock.now() - SECOND_NANOS) .hasEpochNanos(testClock.now()) .hasAttributes(Attributes.builder().put("key", "value").build()) @@ -255,10 +260,10 @@ void collectMetrics_ExponentialHistogramAggregation() { .hasSum(12) .hasScale(-1) .hasZeroCount(0); - assertThat(point.getPositiveBuckets()) + MetricAssertions.assertThat(point.getPositiveBuckets()) .hasOffset(1) .hasCounts(Collections.singletonList(1L)); - assertThat(point.getNegativeBuckets()) + MetricAssertions.assertThat(point.getNegativeBuckets()) .hasOffset(0) .hasCounts(Collections.emptyList()); })); @@ -321,16 +326,16 @@ void stressTest() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.of(stringKey("K"), "V")) - .hasCount(8_000) - .hasSum(80_000))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + 
.hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("K", "V")) + .hasCount(8_000) + .hasSum(80_000)))); } @Test @@ -369,22 +374,45 @@ void stressTest_WithDifferentLabelSet() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") - .hasDoubleHistogram() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasCount(4_000) - .hasSum(40_000) - .hasBucketCounts(0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)) - .extracting(PointData::getAttributes) - .containsExactlyInAnyOrder( - Attributes.of(stringKey(keys[0]), values[0]), - Attributes.of(stringKey(keys[1]), values[1]), - Attributes.of(stringKey(keys[2]), values[2]), - Attributes.of(stringKey(keys[3]), values[3]))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasCount(4_000) + .hasSum(40_000) + .hasBucketCounts( + 0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry(keys[0], values[0])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasCount(4_000) + .hasSum(40_000) + .hasBucketCounts( + 0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry(keys[1], values[1])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasCount(4_000) + .hasSum(40_000) + .hasBucketCounts( + 0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry(keys[2], values[2])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasCount(4_000) + .hasSum(40_000) + .hasBucketCounts( + 0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry(keys[3], values[3]))))); } private static class OperationUpdaterWithBinding extends OperationUpdater { diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounterTest.java index 00f923461a0..a71d9da26e3 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkDoubleUpDownCounterTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.opentelemetry.api.common.Attributes; @@ -14,7 +15,6 @@ import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater; -import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.instrument.BoundDoubleUpDownCounter; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; @@ -98,17 +98,17 @@ void collectMetrics_WithEmptyAttributes() { .hasName("testUpDownCounter") .hasDescription("description") .hasUnit("ms") - .hasDoubleSum() - .isNotMonotonic() - .isCumulative() - .points() 
- .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(24))); + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(24)))); } @Test @@ -135,22 +135,23 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testUpDownCounter") - .hasDoubleSum() - .isNotMonotonic() - .isCumulative() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point).hasAttributes(Attributes.empty()).hasValue(33.5), - point -> - assertThat(point) - .hasValue(555.9) - .hasAttributes(Attributes.of(stringKey("K"), "V")))); + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(33.5), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasValue(555.9) + .hasAttributes(attributeEntry("K", "V"))))); // Repeat to prove we keep previous values. testClock.advance(Duration.ofNanos(SECOND_NANOS)); @@ -163,22 +164,23 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testUpDownCounter") - .hasDoubleSum() - .isNotMonotonic() - .isCumulative() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point).hasAttributes(Attributes.empty()).hasValue(44.5), - point -> - assertThat(point) - .hasAttributes(Attributes.of(stringKey("K"), "V")) - .hasValue(777.9))); + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(44.5), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("K", "V")) + .hasValue(777.9)))); } finally { bound.unbind(); } @@ -215,19 +217,17 @@ void stressTest() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testUpDownCounter") - .hasDoubleSum() - .isCumulative() - .isNotMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasValue(80_000) - .attributes() - .hasSize(1) - .containsEntry("K", "V"))); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(80_000) + .hasAttributes(attributeEntry("K", "V"))))); } @Test @@ -264,22 +264,35 @@ void stressTest_WithDifferentLabelSet() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testUpDownCounter") - .hasDoubleSum() - .isCumulative() - .isNotMonotonic() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - 
.hasEpochNanos(testClock.now()) - .hasValue(40_000)) - .extracting(PointData::getAttributes) - .containsExactlyInAnyOrder( - Attributes.of(stringKey(keys[0]), values[0]), - Attributes.of(stringKey(keys[1]), values[1]), - Attributes.of(stringKey(keys[2]), values[2]), - Attributes.of(stringKey(keys[3]), values[3]))); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(40_000) + .hasAttributes(attributeEntry(keys[0], values[0])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(40_000) + .hasAttributes(attributeEntry(keys[1], values[1])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(40_000) + .hasAttributes(attributeEntry(keys[2], values[2])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(40_000) + .hasAttributes(attributeEntry(keys[3], values[3]))))); } private static class OperationUpdaterWithBinding extends OperationUpdater { diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongCounterTest.java index 87fb5bba005..f372ca4ee82 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongCounterTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.github.netmikey.logunit.api.LogCapturer; @@ -16,7 +17,6 @@ import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater; -import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.instrument.BoundLongCounter; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; @@ -87,17 +87,18 @@ void collectMetrics_WithEmptyAttributes() { .hasName("testCounter") .hasDescription("description") .hasUnit("By") - .hasLongSum() - .isMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(24))); + .hasLongSumSatisfying( + longSum -> + longSum + .isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(24)))); } @Test @@ -122,21 +123,24 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testCounter") - .hasLongSum() - .isMonotonic() - .isCumulative() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> 
assertThat(point).hasAttributes(Attributes.empty()).hasValue(33), - point -> - assertThat(point) - .hasAttributes(Attributes.of(stringKey("K"), "V")) - .hasValue(555))); + .hasLongSumSatisfying( + longSum -> + longSum + .isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(33), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("K", "V")) + .hasValue(555)))); // Repeat to prove we keep previous values. testClock.advance(Duration.ofNanos(SECOND_NANOS)); @@ -149,21 +153,24 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testCounter") - .hasLongSum() - .isMonotonic() - .isCumulative() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(44), - point -> - assertThat(point) - .hasAttributes(Attributes.of(stringKey("K"), "V")) - .hasValue(777))); + .hasLongSumSatisfying( + longSum -> + longSum + .isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(44), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("K", "V")) + .hasValue(777)))); } finally { bound.unbind(); } @@ -224,19 +231,18 @@ void stressTest() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testCounter") - .hasLongSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasValue(160_000) - .attributes() - .hasSize(1) - .containsEntry("K", "V"))); + .hasLongSumSatisfying( + longSum -> + longSum + .isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(160_000) + .hasAttributes(attributeEntry("K", "V"))))); } @Test @@ -272,22 +278,36 @@ void stressTest_WithDifferentLabelSet() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testCounter") - .hasLongSum() - .isCumulative() - .isMonotonic() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasValue(20_000)) - .extracting(PointData::getAttributes) - .containsExactlyInAnyOrder( - Attributes.of(stringKey(keys[0]), values[0]), - Attributes.of(stringKey(keys[1]), values[1]), - Attributes.of(stringKey(keys[2]), values[2]), - Attributes.of(stringKey(keys[3]), values[3]))); + .hasLongSumSatisfying( + longSum -> + longSum + .isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(20_000) + .hasAttributes(attributeEntry(keys[0], values[0])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(20_000) + .hasAttributes(attributeEntry(keys[1], values[1])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(20_000) + 
.hasAttributes(attributeEntry(keys[2], values[2])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(20_000) + .hasAttributes(attributeEntry(keys[3], values[3]))))); } private static class OperationUpdaterWithBinding extends OperationUpdater { diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongGaugeBuilderTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongGaugeBuilderTest.java index 4981703a1a5..c3c5d3d05b3 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongGaugeBuilderTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongGaugeBuilderTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.Meter; @@ -44,7 +45,11 @@ void removeCallback() { assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( - metric -> assertThat(metric).hasName("testGauge").hasLongGauge().points().hasSize(1)); + metric -> + assertThat(metric) + .hasName("testGauge") + .hasLongGaugeSatisfying( + longGauge -> longGauge.hasPointsSatisfying(point -> {}))); gauge.close(); @@ -76,15 +81,15 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongGauge() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 1000000000L) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.builder().put("k", "v").build()) - .hasValue(12))); + .hasLongGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 1000000000L) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("k", "v")) + .hasValue(12)))); testClock.advance(Duration.ofSeconds(1)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -93,14 +98,14 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongGauge() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 2000000000L) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.builder().put("k", "v").build()) - .hasValue(12))); + .hasLongGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 2000000000L) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("k", "v")) + .hasValue(12)))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongHistogramTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongHistogramTest.java index 0669f63ca13..1a984c93cec 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongHistogramTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongHistogramTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static 
io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.github.netmikey.logunit.api.LogCapturer; @@ -16,10 +17,10 @@ import io.opentelemetry.internal.testing.slf4j.SuppressLogger; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater; -import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.instrument.BoundLongHistogram; import io.opentelemetry.sdk.metrics.internal.view.ExponentialHistogramAggregation; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; import io.opentelemetry.sdk.testing.time.TestClock; import java.time.Duration; @@ -97,21 +98,23 @@ void collectMetrics_WithEmptyAttributes() { .hasName("testHistogram") .hasDescription("description") .hasUnit("By") - .hasDoubleHistogram() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasCount(2) - .hasSum(24) - .hasBucketBoundaries( - 5, 10, 25, 50, 75, 100, 250, 500, 750, 1_000, 2_500, 5_000, - 7_500, 10_000) - .hasBucketCounts(0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))); + .hasHistogramSatisfying( + histogram -> + histogram + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasCount(2) + .hasSum(24) + .hasBucketBoundaries( + 5, 10, 25, 50, 75, 100, 250, 500, 750, 1_000, 2_500, + 5_000, 7_500, 10_000) + .hasBucketCounts( + 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)))); } @Test @@ -136,26 +139,27 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") - .hasDoubleHistogram() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasCount(3) - .hasSum(445) - .hasBucketCounts(1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0) - .hasAttributes(Attributes.builder().put("K", "V").build()), - point -> - assertThat(point) - .hasCount(2) - .hasSum(23) - .hasBucketCounts(0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - .hasAttributes(Attributes.empty()))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasCount(3) + .hasSum(445) + .hasBucketCounts( + 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry("K", "V")), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasCount(2) + .hasSum(23) + .hasBucketCounts( + 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(Attributes.empty())))); // Histograms are cumulative by default. 
testClock.advance(Duration.ofNanos(SECOND_NANOS)); @@ -168,26 +172,27 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") - .hasDoubleHistogram() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasCount(4) - .hasSum(667) - .hasBucketCounts(1, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0) - .hasAttributes(Attributes.builder().put("K", "V").build()), - point -> - assertThat(point) - .hasCount(3) - .hasSum(40) - .hasBucketCounts(0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - .hasAttributes(Attributes.empty()))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasCount(4) + .hasSum(667) + .hasBucketCounts( + 1, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry("K", "V")), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasCount(3) + .hasSum(40) + .hasBucketCounts( + 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(Attributes.empty())))); } finally { bound.unbind(); } @@ -221,7 +226,7 @@ void collectMetrics_ExponentialHistogramAggregation() { assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( metric -> - assertThat(metric) + MetricAssertions.assertThat(metric) .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") @@ -232,7 +237,7 @@ void collectMetrics_ExponentialHistogramAggregation() { .points() .satisfiesExactlyInAnyOrder( point -> { - assertThat(point) + MetricAssertions.assertThat(point) .hasStartEpochNanos(testClock.now() - SECOND_NANOS) .hasEpochNanos(testClock.now()) .hasAttributes(Attributes.empty()) @@ -240,15 +245,15 @@ void collectMetrics_ExponentialHistogramAggregation() { .hasSum(25) .hasScale(-1) .hasZeroCount(0); - assertThat(point.getPositiveBuckets()) + MetricAssertions.assertThat(point.getPositiveBuckets()) .hasOffset(1) .hasCounts(Collections.singletonList(2L)); - assertThat(point.getNegativeBuckets()) + MetricAssertions.assertThat(point.getNegativeBuckets()) .hasOffset(0) .hasCounts(Collections.emptyList()); }, point -> { - assertThat(point) + MetricAssertions.assertThat(point) .hasStartEpochNanos(testClock.now() - SECOND_NANOS) .hasEpochNanos(testClock.now()) .hasAttributes(Attributes.builder().put("key", "value").build()) @@ -256,10 +261,10 @@ void collectMetrics_ExponentialHistogramAggregation() { .hasSum(12) .hasScale(-1) .hasZeroCount(0); - assertThat(point.getPositiveBuckets()) + MetricAssertions.assertThat(point.getPositiveBuckets()) .hasOffset(1) .hasCounts(Collections.singletonList(1L)); - assertThat(point.getNegativeBuckets()) + MetricAssertions.assertThat(point.getNegativeBuckets()) .hasOffset(0) .hasCounts(Collections.emptyList()); })); @@ -322,16 +327,16 @@ void stressTest() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.of(stringKey("K"), "V")) - .hasCount(16_000) - .hasSum(160_000))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + 
.hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("K", "V")) + .hasCount(16_000) + .hasSum(160_000)))); } @Test @@ -370,22 +375,45 @@ void stressTest_WithDifferentLabelSet() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testHistogram") - .hasDoubleHistogram() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasCount(2_000) - .hasSum(20_000) - .hasBucketCounts(0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)) - .extracting(PointData::getAttributes) - .containsExactlyInAnyOrder( - Attributes.of(stringKey(keys[0]), values[0]), - Attributes.of(stringKey(keys[1]), values[1]), - Attributes.of(stringKey(keys[2]), values[2]), - Attributes.of(stringKey(keys[3]), values[3]))); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasCount(2_000) + .hasSum(20_000) + .hasBucketCounts( + 0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry(keys[0], values[0])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasCount(2_000) + .hasSum(20_000) + .hasBucketCounts( + 0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry(keys[1], values[1])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasCount(2_000) + .hasSum(20_000) + .hasBucketCounts( + 0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry(keys[2], values[2])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasCount(2_000) + .hasSum(20_000) + .hasBucketCounts( + 0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + .hasAttributes(attributeEntry(keys[3], values[3]))))); } private static class OperationUpdaterWithBinding extends OperationUpdater { diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongUpDownCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongUpDownCounterTest.java index 957adecbd76..b97212d38d9 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongUpDownCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkLongUpDownCounterTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThatThrownBy; import io.opentelemetry.api.common.Attributes; @@ -14,7 +15,6 @@ import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater; -import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.instrument.BoundLongUpDownCounter; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader; @@ -90,17 +90,17 @@ void collectMetrics_WithEmptyAttributes() { .hasName("testUpDownCounter") .hasDescription("description") .hasUnit("By") - .hasLongSum() - .isNotMonotonic() - .isCumulative() - .points() - 
.satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(24))); + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(24)))); } @Test @@ -126,21 +126,23 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testUpDownCounter") - .hasLongSum() - .isNotMonotonic() - .isCumulative() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(33), - point -> - assertThat(point) - .hasAttributes(Attributes.of(stringKey("K"), "V")) - .hasValue(555))); + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(33), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("K", "V")) + .hasValue(555)))); // Repeat to prove we keep previous values. testClock.advance(Duration.ofNanos(SECOND_NANOS)); @@ -153,21 +155,23 @@ void collectMetrics_WithMultipleCollects() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testUpDownCounter") - .hasLongSum() - .isNotMonotonic() - .isCumulative() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now())) - .satisfiesExactlyInAnyOrder( - point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(44), - point -> - assertThat(point) - .hasAttributes(Attributes.of(stringKey("K"), "V")) - .hasValue(777))); + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(44), + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasAttributes(attributeEntry("K", "V")) + .hasValue(777)))); } finally { bound.unbind(); } @@ -204,19 +208,17 @@ void stressTest() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testUpDownCounter") - .hasLongSum() - .isCumulative() - .isNotMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasValue(160_000) - .attributes() - .hasSize(1) - .containsEntry("K", "V"))); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(160_000) + .hasAttributes(attributeEntry("K", "V"))))); } @Test @@ -253,22 +255,35 @@ void stressTest_WithDifferentLabelSet() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testUpDownCounter") - .hasLongSum() - .isCumulative() - .isNotMonotonic() - .points() - .allSatisfy( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - 
.hasValue(20_000)) - .extracting(PointData::getAttributes) - .containsExactlyInAnyOrder( - Attributes.of(stringKey(keys[0]), values[0]), - Attributes.of(stringKey(keys[1]), values[1]), - Attributes.of(stringKey(keys[2]), values[2]), - Attributes.of(stringKey(keys[3]), values[3]))); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(20_000) + .hasAttributes(attributeEntry(keys[0], values[0])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(20_000) + .hasAttributes(attributeEntry(keys[1], values[1])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(20_000) + .hasAttributes(attributeEntry(keys[2], values[2])), + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasValue(20_000) + .hasAttributes(attributeEntry(keys[3], values[3]))))); } private static class OperationUpdaterWithBinding extends OperationUpdater { diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterProviderTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterProviderTest.java index 46058f804a7..5135bfe5342 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterProviderTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterProviderTest.java @@ -5,8 +5,8 @@ package io.opentelemetry.sdk.metrics; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; -import static org.assertj.core.api.Assertions.entry; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.mockito.Mockito.when; import io.github.netmikey.logunit.api.LogCapturer; @@ -27,7 +27,6 @@ import io.opentelemetry.context.Scope; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; -import io.opentelemetry.sdk.metrics.data.LongPointData; import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.export.MetricReader; import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil; @@ -120,87 +119,89 @@ void collectAllSyncInstruments() { metric -> assertThat(metric) .hasName("testDoubleHistogram") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasCount(1) - .hasSum(10.1) - .hasBucketCounts(0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasCount(1) + .hasSum(10.1) + .hasBucketCounts( + 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))), metric -> assertThat(metric) .hasName("testDoubleCounter") - .hasDoubleSum() - .isMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(10.1)), + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + 
.hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(10.1))), metric -> assertThat(metric) .hasName("testLongHistogram") - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasCount(1) - .hasSum(10) - .hasBucketCounts(0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)), + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasCount(1) + .hasSum(10) + .hasBucketCounts( + 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))), metric -> assertThat(metric) .hasName("testLongUpDownCounter") - .hasLongSum() - .isNotMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(-10)), + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(-10))), metric -> assertThat(metric) .hasName("testLongCounter") - .hasLongSum() - .isMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(10)), + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(10))), metric -> assertThat(metric) .hasName("testDoubleUpDownCounter") - .hasDoubleSum() - .isNotMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(-10.1))); + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(-10.1)))); } @Test @@ -226,16 +227,17 @@ void collectAllSyncInstruments_OverwriteTemporality() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testLongCounter") - .hasDoubleHistogram() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 1000000000) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasBucketCounts(1))); + .hasHistogramSatisfying( + histogram -> + histogram + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 1000000000) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasBucketCounts(1)))); longCounter.add(10, Attributes.empty()); testClock.advance(Duration.ofSeconds(1)); @@ -246,16 +248,17 @@ void collectAllSyncInstruments_OverwriteTemporality() { assertThat(metric) .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) - .hasDoubleHistogram() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 
1000000000) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasBucketCounts(1))); + .hasHistogramSatisfying( + histogram -> + histogram + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 1000000000) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasBucketCounts(1)))); } @Test @@ -285,16 +288,17 @@ void collectAllSyncInstruments_DeltaHistogram() { .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasDescription("") .hasUnit("") - .hasDoubleHistogram() - .isDelta() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 1000000000) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasBucketCounts(1))) + .hasHistogramSatisfying( + histogram -> + histogram + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 1000000000) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasBucketCounts(1)))) .extracting(MetricData::getName) .containsExactlyInAnyOrder( "testLongCounter", "testDoubleCounter", "testLongHistogram", "testDoubleHistogram"); @@ -314,16 +318,17 @@ void collectAllSyncInstruments_DeltaHistogram() { .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasDescription("") .hasUnit("") - .hasDoubleHistogram() - .isDelta() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 1000000000) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasBucketCounts(1))) + .hasHistogramSatisfying( + histogram -> + histogram + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 1000000000) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasBucketCounts(1)))) .extracting(MetricData::getName) .containsExactlyInAnyOrder( "testLongCounter", "testDoubleCounter", "testLongHistogram", "testDoubleHistogram"); @@ -370,83 +375,83 @@ void collectAllAsyncInstruments() { metric -> assertThat(metric) .hasName("testLongSumObserver") - .hasLongSum() - .isMonotonic() - .isCumulative() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(10)), + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(10))), metric -> assertThat(metric) .hasName("testDoubleSumObserver") - .hasDoubleSum() - .isMonotonic() - .isCumulative() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(10.1)), + .hasDoubleSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(10.1))), metric -> assertThat(metric) .hasName("testLongUpDownSumObserver") - .hasLongSum() - .isNotMonotonic() - .isCumulative() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(-10)), + .hasLongSumSatisfying( + sum 
-> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(-10))), metric -> assertThat(metric) .hasName("testDoubleUpDownSumObserver") - .hasDoubleSum() - .isNotMonotonic() - .isCumulative() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(-10.1)), + .hasDoubleSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(-10.1))), metric -> assertThat(metric) .hasName("testLongValueObserver") - .hasLongGauge() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(10)), + .hasLongGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(10))), metric -> assertThat(metric) .hasName("testDoubleValueObserver") - .hasDoubleGauge() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()) - .hasAttributes(Attributes.empty()) - .hasValue(10.1))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now()) + .hasAttributes(Attributes.empty()) + .hasValue(10.1)))); } @Test @@ -473,18 +478,11 @@ void removeAsyncInstrument() { .satisfiesExactly( metricData -> assertThat(metricData) - .hasLongSum() - .points() - .hasSize(2) - .satisfiesExactlyInAnyOrder( - pointData -> - assertThat(pointData) - .hasAttributes(Attributes.builder().put("callback", "one").build()), - (Consumer) - longPointData -> - assertThat(longPointData) - .hasAttributes( - Attributes.builder().put("callback", "two").build()))); + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> point.hasAttributes(attributeEntry("callback", "one")), + point -> point.hasAttributes(attributeEntry("callback", "two"))))); observableCounter1.close(); @@ -493,15 +491,10 @@ void removeAsyncInstrument() { .satisfiesExactly( metricData -> assertThat(metricData) - .hasLongSum() - .points() - .hasSize(1) - .satisfiesExactlyInAnyOrder( - (Consumer) - longPointData -> - assertThat(longPointData) - .hasAttributes( - Attributes.builder().put("callback", "two").build()))); + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> point.hasAttributes(attributeEntry("callback", "two"))))); observableCounter2.close(); assertThat(reader.collectAllMetrics()).hasSize(0); @@ -546,14 +539,11 @@ void collectAllMetrics_NoConcurrentCalls() assertThat(allMetricData) .hasSize(4) .allSatisfy( - metricData -> { - assertThat(metricData) - .hasInstrumentationScope(InstrumentationScopeInfo.create("meter")) - .hasLongSum() - .points() - .hasSize(1) - .satisfiesExactly(point -> assertThat(point).hasValue(1)); - }); + metricData -> + assertThat(metricData) + .hasInstrumentationScope(InstrumentationScopeInfo.create("meter")) + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(1)))); } finally { 
executorService.shutdown(); } @@ -583,12 +573,10 @@ void viewSdk_filterAttributes() { .satisfiesExactly( metric -> assertThat(metric) - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point.getAttributes().asMap()) - .containsOnly(entry(AttributeKey.stringKey("allowed"), "bear")))); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> point.hasAttributes(attributeEntry("allowed", "bear"))))); } @Test @@ -618,11 +606,11 @@ void viewSdk_AllowRenames() { .hasName("not_test") .hasDescription("not_desc") .hasUnit("unit") - .hasDoubleGauge()); + .hasDoubleGaugeSatisfying(gauge -> {})); } @Test - void viewSdk_AllowMulitpleViewsPerSynchronousInstrument() { + void viewSdk_AllowMultipleViewsPerSynchronousInstrument() { InstrumentSelector selector = InstrumentSelector.builder().setName("test").build(); InMemoryMetricReader reader = InMemoryMetricReader.create(); SdkMeterProvider provider = @@ -654,13 +642,13 @@ void viewSdk_AllowMulitpleViewsPerSynchronousInstrument() { .hasName("not_test") .hasDescription("not_desc") .hasUnit("unit") - .hasDoubleHistogram(), + .hasHistogramSatisfying(histogramAssert -> {}), metric -> assertThat(metric) .hasName("not_test_2") .hasDescription("not_desc_2") .hasUnit("unit") - .hasDoubleSum()); + .hasDoubleSumSatisfying(sum -> {})); } @Test @@ -698,13 +686,13 @@ void viewSdk_AllowMultipleViewsPerAsynchronousInstrument() { .hasName("not_test") .hasDescription("not_desc") .hasUnit("unit") - .hasDoubleGauge(), + .hasDoubleGaugeSatisfying(gauge -> {}), metric -> assertThat(metric) .hasName("not_test_2") .hasDescription("not_desc_2") .hasUnit("unit") - .hasDoubleGauge()); + .hasDoubleGaugeSatisfying(gauge -> {})); } @Test @@ -737,14 +725,12 @@ void viewSdk_capturesBaggageFromContext() { metric -> assertThat(metric) .hasName("test") - .hasLongSum() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasAttributes( - Attributes.builder().put("baggage", "value").build()))); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> + point.hasAttributes(attributeEntry("baggage", "value"))))); } @Test @@ -769,15 +755,15 @@ void sdkMeterProvider_supportsMultipleCollectorsCumulative() { assertThat(metric) .hasResource(RESOURCE) .hasName("testSum") - .hasLongSum() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now()) - .hasValue(1))); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasValue(1)))); counter.add(1L); testClock.advance(Duration.ofSeconds(1)); @@ -789,15 +775,15 @@ void sdkMeterProvider_supportsMultipleCollectorsCumulative() { assertThat(metric) .hasResource(RESOURCE) .hasName("testSum") - .hasLongSum() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now()) - .hasValue(2))); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasValue(2)))); // Make sure Collector 1 sees the same point as 2 assertThat(collector1.collectAllMetrics()) @@ -806,15 +792,15 @@ void sdkMeterProvider_supportsMultipleCollectorsCumulative() { assertThat(metric) .hasResource(RESOURCE) .hasName("testSum") - .hasLongSum() - .isCumulative() - .points() - 
.satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now()) - .hasValue(2))); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasValue(2)))); } @Test @@ -847,15 +833,15 @@ void sdkMeterProvider_supportsMultipleCollectorsDelta() { assertThat(metric) .hasResource(RESOURCE) .hasName("testSum") - .hasLongSum() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now()) - .hasValue(1))); + .hasLongSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasValue(1)))); long collectorOneTimeOne = testClock.now(); counter.add(1L); @@ -868,15 +854,15 @@ void sdkMeterProvider_supportsMultipleCollectorsDelta() { assertThat(metric) .hasResource(RESOURCE) .hasName("testSum") - .hasLongSum() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(startTime) - .hasEpochNanos(testClock.now()) - .hasValue(2))); + .hasLongSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(startTime) + .hasEpochNanos(testClock.now()) + .hasValue(2)))); // Make sure Collector 1 sees the same point as 2, when it collects. assertThat(collector1.collectAllMetrics()) @@ -885,15 +871,15 @@ void sdkMeterProvider_supportsMultipleCollectorsDelta() { assertThat(metric) .hasResource(RESOURCE) .hasName("testSum") - .hasLongSum() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(collectorOneTimeOne) - .hasEpochNanos(testClock.now()) - .hasValue(1))); + .hasLongSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(collectorOneTimeOne) + .hasEpochNanos(testClock.now()) + .hasValue(1)))); } @Test diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterRegistryTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterRegistryTest.java index eec6345e35c..1edc76dca1d 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterRegistryTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkMeterRegistryTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; @@ -116,16 +116,16 @@ void metricProducer_GetAllMetrics() { metric -> assertThat(metric) .hasName("testLongCounter") - .hasLongSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasValue(10) - .hasStartEpochNanos(testClock.now()) - .hasEpochNanos(testClock.now()))) + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasValue(10) + .hasStartEpochNanos(testClock.now()) + .hasEpochNanos(testClock.now())))) .extracting(MetricData::getInstrumentationScopeInfo) .containsExactlyInAnyOrder( ((SdkMeter) sdkMeter1).getInstrumentationScopeInfo(), diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleCounterTest.java 
b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleCounterTest.java index b3f956267e7..a568a3ffb2d 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleCounterTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.ObservableDoubleCounter; @@ -42,7 +43,10 @@ void removeCallback() { assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( - metric -> assertThat(metric).hasName("testCounter").hasDoubleSum().points().hasSize(1)); + metric -> + assertThat(metric) + .hasName("testCounter") + .hasDoubleSumSatisfying(sum -> sum.hasPointsSatisfying(point -> {}))); counter.close(); @@ -87,19 +91,17 @@ void collectMetrics_WithOneRecord() { .hasName("testObserver") .hasDescription("My own DoubleSumObserver") .hasUnit("ms") - .hasDoubleSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12.1) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12.1) + .hasAttributes(attributeEntry("k", "v"))))); testClock.advance(Duration.ofNanos(SECOND_NANOS)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -108,19 +110,17 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasDoubleSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12.1) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12.1) + .hasAttributes(attributeEntry("k", "v"))))); } @Test @@ -151,19 +151,18 @@ void collectMetrics_DeltaSumAggregator() { .hasName("testObserver") .hasDescription("My own DoubleSumObserver") .hasUnit("ms") - .hasDoubleSum() - .isDelta() - .isMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12.1) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12.1) + .hasAttributes( + Attributes.builder().put("k", "v").build())))); testClock.advance(Duration.ofNanos(SECOND_NANOS)); assertThat(sdkMeterReader.collectAllMetrics()) 
.satisfiesExactly( @@ -174,18 +173,16 @@ void collectMetrics_DeltaSumAggregator() { .hasName("testObserver") .hasDescription("My own DoubleSumObserver") .hasUnit("ms") - .hasDoubleSum() - .isDelta() - .isMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(0) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(0) + .hasAttributes(attributeEntry("k", "v"))))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleUpDownCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleUpDownCounterTest.java index 4785df31887..daf0dc0f18e 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleUpDownCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableDoubleUpDownCounterTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; @@ -44,7 +45,10 @@ void removeCallback() { assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( - metric -> assertThat(metric).hasName("testCounter").hasDoubleSum().points().hasSize(1)); + metric -> + assertThat(metric) + .hasName("testCounter") + .hasDoubleSumSatisfying(sum -> sum.hasPointsSatisfying(point -> {}))); counter.close(); @@ -85,19 +89,17 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasDoubleSum() - .isCumulative() - .isNotMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12.1) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12.1) + .hasAttributes(attributeEntry("k", "v"))))); testClock.advance(Duration.ofNanos(SECOND_NANOS)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -106,19 +108,17 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasDoubleSum() - .isCumulative() - .isNotMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12.1) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12.1) + 
.hasAttributes(attributeEntry("k", "v"))))); } @Test @@ -147,19 +147,17 @@ void collectMetrics_DeltaSumAggregator() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasDoubleSum() - .isDelta() - .isNotMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12.1) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12.1) + .hasAttributes(attributeEntry("k", "v"))))); testClock.advance(Duration.ofNanos(SECOND_NANOS)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -168,19 +166,17 @@ void collectMetrics_DeltaSumAggregator() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasDoubleSum() - .isDelta() - .isNotMonotonic() - .points() - .satisfiesExactlyInAnyOrder( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(0) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .isNotMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(0) + .hasAttributes(attributeEntry("k", "v"))))); } @Test diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableLongCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableLongCounterTest.java index 10897457cf6..f4208b1c26d 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableLongCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableLongCounterTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.ObservableLongCounter; @@ -41,7 +42,10 @@ void removeCallback() { assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( - metric -> assertThat(metric).hasName("testCounter").hasLongSum().points().hasSize(1)); + metric -> + assertThat(metric) + .hasName("testCounter") + .hasLongSumSatisfying(sum -> sum.hasPointsSatisfying(point -> {}))); counter.close(); @@ -79,19 +83,17 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongSum() - .isMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12) + 
.hasAttributes(attributeEntry("k", "v"))))); testClock.advance(Duration.ofNanos(SECOND_NANOS)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -100,19 +102,17 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongSum() - .isMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12) + .hasAttributes(attributeEntry("k", "v"))))); } @Test @@ -137,19 +137,17 @@ void collectMetrics_DeltaSumAggregator() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongSum() - .isMonotonic() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12) + .hasAttributes(attributeEntry("k", "v"))))); testClock.advance(Duration.ofNanos(SECOND_NANOS)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -158,18 +156,16 @@ void collectMetrics_DeltaSumAggregator() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongSum() - .isMonotonic() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(0) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasLongSumSatisfying( + sum -> + sum.isMonotonic() + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(0) + .hasAttributes(attributeEntry("k", "v"))))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableLongUpDownCounterTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableLongUpDownCounterTest.java index 60cc5c7ad37..33c3c17167d 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableLongUpDownCounterTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/SdkObservableLongUpDownCounterTest.java @@ -6,7 +6,8 @@ package io.opentelemetry.sdk.metrics; import static io.opentelemetry.api.common.AttributeKey.stringKey; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.ObservableLongUpDownCounter; @@ -41,7 +42,10 @@ void removeCallback() { assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( - metric -> assertThat(metric).hasName("testCounter").hasLongSum().points().hasSize(1)); + metric -> + 
assertThat(metric) + .hasName("testCounter") + .hasLongSumSatisfying(sum -> sum.hasPointsSatisfying(point -> {}))); counter.close(); @@ -79,19 +83,17 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongSum() - .isNotMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12) + .hasAttributes(attributeEntry("k", "v"))))); testClock.advance(Duration.ofNanos(SECOND_NANOS)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -100,19 +102,17 @@ void collectMetrics_WithOneRecord() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongSum() - .isNotMonotonic() - .isCumulative() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12) + .hasAttributes(attributeEntry("k", "v"))))); } @Test @@ -139,19 +139,17 @@ void collectMetrics_DeltaSumAggregator() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongSum() - .isNotMonotonic() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(12) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(12) + .hasAttributes(attributeEntry("k", "v"))))); testClock.advance(Duration.ofNanos(SECOND_NANOS)); assertThat(sdkMeterReader.collectAllMetrics()) .satisfiesExactly( @@ -160,18 +158,16 @@ void collectMetrics_DeltaSumAggregator() { .hasResource(RESOURCE) .hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO) .hasName("testObserver") - .hasLongSum() - .isNotMonotonic() - .isDelta() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(testClock.now() - SECOND_NANOS) - .hasEpochNanos(testClock.now()) - .hasValue(0) - .attributes() - .hasSize(1) - .containsEntry("k", "v"))); + .hasLongSumSatisfying( + sum -> + sum.isNotMonotonic() + .isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(testClock.now() - SECOND_NANOS) + .hasEpochNanos(testClock.now()) + .hasValue(0) + .hasAttributes(attributeEntry("k", "v"))))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregatorTest.java index 78acb5bec90..873be155d1c 100644 --- 
a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExplicitBucketHistogramAggregatorTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import com.google.common.collect.ImmutableList; import io.opentelemetry.api.common.Attributes; @@ -301,17 +301,17 @@ void toMetricDataWithExemplars() { 0, 10, 100)) - .hasDoubleHistogram() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasSum(2) - .hasMin(2) - .hasMax(2) - .hasBucketCounts(1, 0, 0, 0) - .hasCount(1) - .hasExemplars(exemplar)); + .hasHistogramSatisfying( + histogram -> + histogram.hasPointsSatisfying( + point -> + point + .hasSum(2) + .hasMin(2) + .hasMax(2) + .hasBucketCounts(1, 0, 0, 0) + .hasCount(1) + .hasExemplars(exemplar))); } @Test diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregatorTest.java index d92cfaf8651..81ffa4d4d5e 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramAggregatorTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import com.google.common.collect.ImmutableList; import io.opentelemetry.api.common.Attributes; @@ -24,6 +24,7 @@ import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory; import io.opentelemetry.sdk.resources.Resource; +import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -480,23 +481,23 @@ void testToMetricData() { // Assertions run twice to verify immutability; recordings shouldn't modify the metric data for (int i = 0; i < 2; i++) { - assertThat(metricDataCumulative) + MetricAssertions.assertThat(metricDataCumulative) .hasExponentialHistogram() .isCumulative() .points() .satisfiesExactly( point -> { - assertThat(point) + MetricAssertions.assertThat(point) .hasSum(123.456) .hasScale(20) .hasZeroCount(2) .hasCount(3) .hasExemplars(exemplar); - assertThat(point.getPositiveBuckets()) + MetricAssertions.assertThat(point.getPositiveBuckets()) .hasCounts(Collections.singletonList(1L)) .hasOffset(valueToIndex(20, 123.456)) .hasTotalCount(1); - assertThat(point.getNegativeBuckets()) + MetricAssertions.assertThat(point.getNegativeBuckets()) .hasTotalCount(0) .hasCounts(Collections.emptyList()); }); @@ -553,8 +554,12 @@ void testMultithreadedUpdates() throws InterruptedException { assertThat(acc.getZeroCount()).isEqualTo(numberOfUpdates); assertThat(acc.getSum()).isCloseTo(100.0D * 10000, Offset.offset(0.0001)); // float error assertThat(acc.getScale()).isEqualTo(5); - assertThat(acc.getPositiveBuckets()).hasTotalCount(numberOfUpdates * 3).hasOffset(-107); - 
assertThat(acc.getNegativeBuckets()).hasTotalCount(numberOfUpdates * 2).hasOffset(-107); + MetricAssertions.assertThat(acc.getPositiveBuckets()) + .hasTotalCount(numberOfUpdates * 3) + .hasOffset(-107); + MetricAssertions.assertThat(acc.getNegativeBuckets()) + .hasTotalCount(numberOfUpdates * 2) + .hasOffset(-107); // Verify positive buckets have correct counts List<Long> posCounts = acc.getPositiveBuckets().getBucketCounts(); diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBucketsTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBucketsTest.java index 68454206175..9dd3b000f4f 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBucketsTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleExponentialHistogramBucketsTest.java @@ -5,12 +5,13 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory; +import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import java.util.Arrays; import java.util.Collections; import java.util.stream.Stream; @@ -39,7 +40,7 @@ void testRecordSimple(ExponentialBucketStrategy buckets) { b.record(1); b.record(1); b.record(1); - assertThat(b).hasTotalCount(3).hasCounts(Collections.singletonList(3L)); + MetricAssertions.assertThat(b).hasTotalCount(3).hasCounts(Collections.singletonList(3L)); } @ParameterizedTest @@ -58,7 +59,10 @@ void testDownscale(ExponentialBucketStrategy buckets) { b.record(2); b.record(4); assertThat(b.getScale()).isEqualTo(0); - assertThat(b).hasTotalCount(3).hasCounts(Arrays.asList(1L, 1L, 1L)).hasOffset(0); + MetricAssertions.assertThat(b) + .hasTotalCount(3) + .hasCounts(Arrays.asList(1L, 1L, 1L)) + .hasOffset(0); } @ParameterizedTest diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregatorTest.java index df252ecdfdc..1b5df5b6fd0 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleLastValueAggregatorTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.SpanContext; @@ -152,14 +152,14 @@ void toMetricData() { .hasName("name") .hasDescription("description") .hasUnit("unit") - .hasDoubleGauge() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasAttributes(Attributes.empty()) - .hasStartEpochNanos(10) - .hasEpochNanos(100) - .hasValue(10)); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point + .hasAttributes(Attributes.empty()) +
.hasStartEpochNanos(10) + .hasEpochNanos(100) + .hasValue(10))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregatorTest.java index 72a8a004b70..5f287e310ca 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/DoubleSumAggregatorTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.SpanContext; @@ -209,17 +209,17 @@ void toMetricData() { .hasName("name") .hasDescription("description") .hasUnit("unit") - .hasDoubleSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(0) - .hasEpochNanos(100) - .hasAttributes(Attributes.empty()) - .hasValue(10)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(0) + .hasEpochNanos(100) + .hasAttributes(Attributes.empty()) + .hasValue(10))); } @Test @@ -247,8 +247,7 @@ void toMetricDataWithExemplars() { 0, 10, 100)) - .hasDoubleSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(1).hasExemplars(exemplar)); + .hasDoubleSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregatorTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregatorTest.java index cc3ffc12e17..5598287f8d2 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregatorTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/aggregator/LongSumAggregatorTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.aggregator; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.trace.SpanContext; @@ -195,17 +195,17 @@ void toMetricData() { .hasName("name") .hasDescription("description") .hasUnit("unit") - .hasLongSum() - .isCumulative() - .isMonotonic() - .points() - .satisfiesExactly( - point -> - assertThat(point) - .hasStartEpochNanos(0) - .hasEpochNanos(100) - .hasAttributes(Attributes.empty()) - .hasValue(10)); + .hasLongSumSatisfying( + sum -> + sum.isCumulative() + .isMonotonic() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(0) + .hasEpochNanos(100) + .hasAttributes(Attributes.empty()) + .hasValue(10))); } @Test @@ -232,8 +232,7 @@ void toMetricDataWithExemplars() { 0, 10, 100)) - .hasLongSum() - .points() - .satisfiesExactly(point -> assertThat(point).hasValue(1).hasExemplars(exemplar)); + .hasLongSumSatisfying( + sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/DoubleRandomFixedSizeExemplarReservoirTest.java 
b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/DoubleRandomFixedSizeExemplarReservoirTest.java index 9b4ece7059b..ec72ad9350b 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/DoubleRandomFixedSizeExemplarReservoirTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/DoubleRandomFixedSizeExemplarReservoirTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.exemplar; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; @@ -16,6 +16,7 @@ import io.opentelemetry.context.Context; import io.opentelemetry.sdk.internal.RandomSupplier; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import io.opentelemetry.sdk.testing.time.TestClock; import java.time.Duration; import java.util.Random; @@ -43,7 +44,7 @@ public void oneMeasurement_alwaysSamplesFirstMeasurement() { .hasSize(1) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasFilteredAttributes(Attributes.empty()) .hasValue(1.1)); @@ -55,7 +56,7 @@ public void oneMeasurement_alwaysSamplesFirstMeasurement() { .hasSize(1) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasFilteredAttributes(Attributes.empty()) .hasValue(2)); @@ -74,7 +75,7 @@ public void oneMeasurement_filtersAttributes() { assertThat(reservoir.collectAndReset(partial)) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasValue(1.1) .hasFilteredAttributes(remaining)); @@ -97,7 +98,7 @@ public void oneMeasurement_includesTraceAndSpanIds() { assertThat(reservoir.collectAndReset(Attributes.empty())) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasValue(1) .hasFilteredAttributes(all) @@ -130,7 +131,9 @@ public int nextInt(int max) { reservoir.offerDoubleMeasurement(3, Attributes.of(key, 3L), Context.root()); assertThat(reservoir.collectAndReset(Attributes.empty())) .satisfiesExactlyInAnyOrder( - exemplar -> assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2), - exemplar -> assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3)); + exemplar -> + MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2), + exemplar -> + MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3)); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/HistogramExemplarReservoirTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/HistogramExemplarReservoirTest.java index 5659a70381a..38702a78553 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/HistogramExemplarReservoirTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/HistogramExemplarReservoirTest.java @@ -5,12 +5,13 @@ package io.opentelemetry.sdk.metrics.internal.exemplar; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import 
io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.context.Context; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; +import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import io.opentelemetry.sdk.testing.time.TestClock; import java.time.Duration; import java.util.Arrays; @@ -36,7 +37,7 @@ public void oneBucket_samplesEverything() { .hasSize(1) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasFilteredAttributes(Attributes.empty()) .hasValue(1.1)); @@ -47,7 +48,7 @@ public void oneBucket_samplesEverything() { .hasSize(1) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasFilteredAttributes(Attributes.empty()) .hasValue(2)); @@ -59,7 +60,7 @@ public void oneBucket_samplesEverything() { .hasSize(1) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasFilteredAttributes(Attributes.empty()) .hasValue(4)); @@ -78,9 +79,21 @@ public void multipleBuckets_samplesIntoCorrectBucket() { assertThat(reservoir.collectAndReset(Attributes.empty())) .hasSize(4) .satisfiesExactlyInAnyOrder( - e -> assertThat(e).hasValue(-1.1).hasFilteredAttributes(Attributes.of(bucketKey, 0L)), - e -> assertThat(e).hasValue(1).hasFilteredAttributes(Attributes.of(bucketKey, 1L)), - e -> assertThat(e).hasValue(11).hasFilteredAttributes(Attributes.of(bucketKey, 2L)), - e -> assertThat(e).hasValue(21).hasFilteredAttributes(Attributes.of(bucketKey, 3L))); + e -> + MetricAssertions.assertThat(e) + .hasValue(-1.1) + .hasFilteredAttributes(Attributes.of(bucketKey, 0L)), + e -> + MetricAssertions.assertThat(e) + .hasValue(1) + .hasFilteredAttributes(Attributes.of(bucketKey, 1L)), + e -> + MetricAssertions.assertThat(e) + .hasValue(11) + .hasFilteredAttributes(Attributes.of(bucketKey, 2L)), + e -> + MetricAssertions.assertThat(e) + .hasValue(21) + .hasFilteredAttributes(Attributes.of(bucketKey, 3L))); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongRandomFixedSizeExemplarReservoirTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongRandomFixedSizeExemplarReservoirTest.java index aa078a2aabf..a59eb42b4f6 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongRandomFixedSizeExemplarReservoirTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/exemplar/LongRandomFixedSizeExemplarReservoirTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.exemplar; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; @@ -16,6 +16,7 @@ import io.opentelemetry.context.Context; import io.opentelemetry.sdk.internal.RandomSupplier; import io.opentelemetry.sdk.metrics.data.LongExemplarData; +import io.opentelemetry.sdk.testing.assertj.MetricAssertions; import io.opentelemetry.sdk.testing.time.TestClock; import java.time.Duration; import java.util.Random; @@ -43,7 +44,7 @@ public void oneMeasurement_alwaysSamplesFirstMeasurement() { .hasSize(1) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) 
.hasFilteredAttributes(Attributes.empty()) .hasValue(1)); @@ -55,7 +56,7 @@ public void oneMeasurement_alwaysSamplesFirstMeasurement() { .hasSize(1) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasFilteredAttributes(Attributes.empty()) .hasValue(2)); @@ -74,7 +75,7 @@ public void oneMeasurement_filtersAttributes() { assertThat(reservoir.collectAndReset(partial)) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasValue(1) .hasFilteredAttributes(remaining)); @@ -97,7 +98,7 @@ public void oneMeasurement_includesTraceAndSpanIds() { assertThat(reservoir.collectAndReset(Attributes.empty())) .satisfiesExactly( exemplar -> - assertThat(exemplar) + MetricAssertions.assertThat(exemplar) .hasEpochNanos(clock.now()) .hasValue(1) .hasFilteredAttributes(all) @@ -130,7 +131,9 @@ public int nextInt(int max) { reservoir.offerLongMeasurement(3, Attributes.of(key, 3L), Context.root()); assertThat(reservoir.collectAndReset(Attributes.empty())) .satisfiesExactlyInAnyOrder( - exemplar -> assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2), - exemplar -> assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3)); + exemplar -> + MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2), + exemplar -> + MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3)); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorageTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorageTest.java index 9d7c72cf77d..211bb14ce3b 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorageTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/AsynchronousMetricStorageTest.java @@ -5,7 +5,8 @@ package io.opentelemetry.sdk.metrics.internal.state; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; @@ -84,20 +85,16 @@ void recordLong() { /* suppressSynchronousCollection= */ false)) .satisfies( metricData -> - assertThat(metricData.getLongSumData().getPoints()) - .satisfiesExactlyInAnyOrder( - pointData -> - assertThat(pointData) - .hasValue(1) - .hasAttributes(Attributes.builder().put("key", "a").build()), - pointData -> - assertThat(pointData) - .hasValue(2) - .hasAttributes(Attributes.builder().put("key", "b").build()), - pointData -> - assertThat(pointData) - .hasValue(3) - .hasAttributes(Attributes.builder().put("key", "c").build()))); + assertThat(metricData) + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasValue(1).hasAttributes(attributeEntry("key", "a")), + point -> + point.hasValue(2).hasAttributes(attributeEntry("key", "b")), + point -> + point.hasValue(3).hasAttributes(attributeEntry("key", "c"))))); assertThat(logs.size()).isEqualTo(0); } @@ -123,20 +120,18 @@ void recordDouble() { /* suppressSynchronousCollection= */ false)) .satisfies( metricData -> - assertThat(metricData.getDoubleSumData().getPoints()) - .satisfiesExactlyInAnyOrder( - pointData -> - assertThat(pointData) - .hasValue(1.1) - 
.hasAttributes(Attributes.builder().put("key", "a").build()), - pointData -> - assertThat(pointData) - .hasValue(2.2) - .hasAttributes(Attributes.builder().put("key", "b").build()), - pointData -> - assertThat(pointData) - .hasValue(3.3) - .hasAttributes(Attributes.builder().put("key", "c").build()))); + assertThat(metricData) + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasValue(1.1).hasAttributes(attributeEntry("key", "a")), + point -> + point.hasValue(2.2).hasAttributes(attributeEntry("key", "b")), + point -> + point + .hasValue(3.3) + .hasAttributes(attributeEntry("key", "c"))))); assertThat(logs.size()).isEqualTo(0); } @@ -164,12 +159,12 @@ void record_ProcessesAttributes() { /* suppressSynchronousCollection= */ false)) .satisfies( metricData -> - assertThat(metricData.getLongSumData().getPoints()) - .satisfiesExactlyInAnyOrder( - pointData -> - assertThat(pointData) - .hasValue(1) - .hasAttributes(Attributes.builder().put("key1", "a").build()))); + assertThat(metricData) + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasValue(1).hasAttributes(attributeEntry("key1", "a"))))); assertThat(logs.size()).isEqualTo(0); } @@ -221,12 +216,12 @@ void record_DuplicateAttributes() { /* suppressSynchronousCollection= */ false)) .satisfies( metricData -> - assertThat(metricData.getLongSumData().getPoints()) - .satisfiesExactlyInAnyOrder( - pointData -> - assertThat(pointData) - .hasValue(1) - .hasAttributes(Attributes.builder().put("key1", "a").build()))); + assertThat(metricData) + .hasLongSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> + point.hasValue(1).hasAttributes(attributeEntry("key1", "a"))))); logs.assertContains("Instrument name has recorded multiple values for the same attributes"); } } diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorageTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorageTest.java index a2af78fb76e..45f38559421 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorageTest.java +++ b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/SynchronousMetricStorageTest.java @@ -5,7 +5,8 @@ package io.opentelemetry.sdk.metrics.internal.state; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.context.Context; @@ -91,15 +92,12 @@ void attributesProcessor_applied() { testClock.now(), false); assertThat(md) - .hasDoubleGauge() - .points() - .allSatisfy( - p -> - assertThat(p) - .attributes() - .hasSize(2) - .containsEntry("modifiedK", "modifiedV") - .containsEntry("K", "V")); + .hasDoubleGaugeSatisfying( + gauge -> + gauge.hasPointsSatisfying( + point -> + point.hasAttributes( + attributeEntry("K", "V"), attributeEntry("modifiedK", "modifiedV")))); } @Test diff --git a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/TemporalMetricStorageTest.java b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/TemporalMetricStorageTest.java index 25f3d87b97a..55a35e75cc0 100644 --- a/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/TemporalMetricStorageTest.java +++ 
b/sdk/metrics/src/test/java/io/opentelemetry/sdk/metrics/internal/state/TemporalMetricStorageTest.java @@ -5,7 +5,7 @@ package io.opentelemetry.sdk.metrics.internal.state; -import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; @@ -14,11 +14,9 @@ import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; -import io.opentelemetry.sdk.metrics.data.PointData; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleAccumulation; -import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; @@ -88,12 +86,11 @@ void synchronousCumulative_joinsWithLastMeasurementForCumulative() { createMeasurement(3), 0, 10)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3))); // Send in new measurement at time 30 for collector 1 assertThat( storage.buildMetricFor( @@ -105,12 +102,11 @@ void synchronousCumulative_joinsWithLastMeasurementForCumulative() { createMeasurement(3), 0, 30)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(30).hasValue(6)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(30).hasValue(6))); // Send in new measurement at time 40 for collector 2 assertThat( storage.buildMetricFor( @@ -122,12 +118,11 @@ void synchronousCumulative_joinsWithLastMeasurementForCumulative() { createMeasurement(4), 0, 60)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4))); // Send in new measurement at time 35 for collector 1 assertThat( storage.buildMetricFor( @@ -139,12 +134,11 @@ void synchronousCumulative_joinsWithLastMeasurementForCumulative() { createMeasurement(2), 0, 35)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(35).hasValue(8)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(35).hasValue(8))); } @Test @@ -168,13 +162,19 @@ void synchronousCumulative_dropsStaleAtLimit() { measurement1, 0, 10)) - .hasDoubleSum() - .isCumulative() - .points() - .hasSize(MetricStorageUtils.MAX_ACCUMULATIONS) - .isNotEmpty() - .allSatisfy(point -> 
assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)); - + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .satisfies( + sumPoint -> + assertThat(sumPoint.getPoints()) + .hasSize(MetricStorageUtils.MAX_ACCUMULATIONS) + .allSatisfy( + sumPointData -> { + assertThat(sumPointData.getStartEpochNanos()).isEqualTo(0); + assertThat(sumPointData.getEpochNanos()).isEqualTo(10); + assertThat(sumPointData.getValue()).isEqualTo(3); + }))); // Send in new measurement at time 20 for collector 1, with attr2 // Result should drop accumulation for attr1, only reporting accumulation for attr2 Map<Attributes, DoubleAccumulation> measurement2 = new HashMap<>(); @@ -193,13 +193,8 @@ measurement2, 0, 20)) - .hasDoubleSum() - .isCumulative() - .points() - .hasSize(1) // Limiting to only recent measurements means we cut everything here. - .isNotEmpty() - .extracting(PointData::getAttributes) - .contains(attr2); + .hasDoubleSumSatisfying( + sum -> sum.isCumulative().hasPointsSatisfying(point -> point.hasAttributes(attr2))); } @Test @@ -221,12 +216,11 @@ void synchronousDelta_dropsStale() { measurement1, 0, 10)) - .hasDoubleSum() - .isDelta() - .points() - .hasSize(1) - .isNotEmpty() - .contains(ImmutableDoublePointData.create(0, 10, attr1, 3)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3))); // Send in new measurement at time 20 for collector 1, with attr2 // Result should drop accumulation for attr1, only reporting accumulation for attr2 @@ -243,12 +237,16 @@ measurement2, 0, 20)) - .hasDoubleSum() - .isDelta() - .points() - .hasSize(1) - .isNotEmpty() - .containsExactly(ImmutableDoublePointData.create(10, 20, attr2, 7)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(10) + .hasEpochNanos(20) + .hasAttributes(attr2) + .hasValue(7))); } @Test @@ -267,12 +265,10 @@ void synchronousDelta_useLastTimestamp() { createMeasurement(3), 0, 10)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3))); // Send in new measurement at time 30 for collector 1 assertThat( storage.buildMetricFor( @@ -284,12 +280,10 @@ void synchronousDelta_useLastTimestamp() { createMeasurement(3), 0, 30)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + sum.hasPointsSatisfying( + point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3))); // Send in new measurement at time 40 for collector 2 assertThat( storage.buildMetricFor( @@ -301,12 +295,11 @@ void synchronousDelta_useLastTimestamp() { createMeasurement(4), 0, 60)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4))); // Send in new measurement at time 35 for collector 1 assertThat( storage.buildMetricFor( @@ -318,12 +311,11 @@ void synchronousDelta_useLastTimestamp() { createMeasurement(2), 0,
35)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(30).hasEpochNanos(35).hasValue(2)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(30).hasEpochNanos(35).hasValue(2))); } @Test @@ -341,12 +333,11 @@ void synchronous_deltaAndCumulative() { createMeasurement(3), 0, 10)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3))); // Send in new measurement at time 30 for collector 1 assertThat( storage.buildMetricFor( @@ -358,12 +349,11 @@ void synchronous_deltaAndCumulative() { createMeasurement(3), 0, 30)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3))); // Send in new measurement at time 40 for collector 2 assertThat( storage.buildMetricFor( @@ -375,12 +365,11 @@ void synchronous_deltaAndCumulative() { createMeasurement(4), 0, 40)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(40).hasValue(4)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(40).hasValue(4))); // Send in new measurement at time 35 for collector 1 assertThat( storage.buildMetricFor( @@ -392,12 +381,11 @@ void synchronous_deltaAndCumulative() { createMeasurement(2), 0, 35)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(30).hasEpochNanos(35).hasValue(2)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(30).hasEpochNanos(35).hasValue(2))); // Send in new measurement at time 60 for collector 2 assertThat( storage.buildMetricFor( @@ -409,12 +397,11 @@ void synchronous_deltaAndCumulative() { createMeasurement(4), 0, 60)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(8)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(8))); } @Test @@ -433,12 +420,11 @@ void asynchronousCumulative_doesNotJoin() { createMeasurement(3), 0, 10)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3))); // Send in new measurement at time 30 for collector 1 assertThat( storage.buildMetricFor( @@ -450,12 +436,10 @@ void asynchronousCumulative_doesNotJoin() { createMeasurement(3), 0, 30)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(30).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + 
sum.hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(30).hasValue(3))); // Send in new measurement at time 40 for collector 2 assertThat( storage.buildMetricFor( @@ -467,12 +451,11 @@ void asynchronousCumulative_doesNotJoin() { createMeasurement(4), 0, 60)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4))); // Send in new measurement at time 35 for collector 1 assertThat( storage.buildMetricFor( @@ -484,12 +467,11 @@ void asynchronousCumulative_doesNotJoin() { createMeasurement(2), 0, 35)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(35).hasValue(2)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(35).hasValue(2))); } @Test @@ -511,12 +493,16 @@ void asynchronousCumulative_dropsStale() { measurement1, 0, 10)) - .hasDoubleSum() - .isCumulative() - .points() - .hasSize(1) - .isNotEmpty() - .contains(ImmutableDoublePointData.create(0, 10, attr1, 3)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(0) + .hasEpochNanos(10) + .hasAttributes(attr1) + .hasValue(3))); // Send in new measurement at time 20 for collector 1, with attr2 // Result should drop accumulation for attr1, only reporting accumulation for attr2 @@ -533,12 +519,16 @@ void asynchronousCumulative_dropsStale() { measurement2, 0, 20)) - .hasDoubleSum() - .isCumulative() - .points() - .hasSize(1) - .isNotEmpty() - .containsExactly(ImmutableDoublePointData.create(0, 20, attr2, 7)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(0) + .hasEpochNanos(20) + .hasAttributes(attr2) + .hasValue(7))); } @Test @@ -560,12 +550,16 @@ void asynchronousDelta_dropsStale() { measurement1, 0, 10)) - .hasDoubleSum() - .isDelta() - .points() - .hasSize(1) - .isNotEmpty() - .contains(ImmutableDoublePointData.create(0, 10, attr1, 3)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(0) + .hasEpochNanos(10) + .hasAttributes(attr1) + .hasValue(3))); // Send in new measurement at time 20 for collector 1, with attr2 // Result should drop accumulation for attr1, only reporting accumulation for attr2 @@ -582,12 +576,16 @@ void asynchronousDelta_dropsStale() { measurement2, 0, 20)) - .hasDoubleSum() - .isDelta() - .points() - .hasSize(1) - .isNotEmpty() - .containsExactly(ImmutableDoublePointData.create(10, 20, attr2, 7)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> + point + .hasStartEpochNanos(10) + .hasEpochNanos(20) + .hasAttributes(attr2) + .hasValue(7))); } @Test @@ -606,12 +604,11 @@ void asynchronousDelta_diffsLastTimestamp() { createMeasurement(3), 0, 10)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3))); // Send in new measurement at time 30 for collector 1 
assertThat( storage.buildMetricFor( @@ -623,12 +620,11 @@ void asynchronousDelta_diffsLastTimestamp() { createMeasurement(3), 0, 30)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(10).hasEpochNanos(30).hasValue(0)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(0))); // Send in new measurement at time 40 for collector 2 assertThat( storage.buildMetricFor( @@ -640,12 +636,11 @@ void asynchronousDelta_diffsLastTimestamp() { createMeasurement(4), 0, 60)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4))); // Send in new measurement at time 35 for collector 1 assertThat( storage.buildMetricFor( @@ -657,12 +652,11 @@ void asynchronousDelta_diffsLastTimestamp() { createMeasurement(2), 0, 35)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(30).hasEpochNanos(35).hasValue(-1)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(30).hasEpochNanos(35).hasValue(-1))); } @Test @@ -681,12 +675,11 @@ void asynchronous_DeltaAndCumulative() { createMeasurement(3), 0, 10)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3))); // Send in new measurement at time 30 for collector 1 assertThat( storage.buildMetricFor( @@ -698,12 +691,11 @@ void asynchronous_DeltaAndCumulative() { createMeasurement(3), 0, 30)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(10).hasEpochNanos(30).hasValue(0)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(0))); // Send in new measurement at time 40 for collector 2 assertThat( storage.buildMetricFor( @@ -715,12 +707,11 @@ void asynchronous_DeltaAndCumulative() { createMeasurement(4), 0, 60)) - .hasDoubleSum() - .isCumulative() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4))); // Send in new measurement at time 35 for collector 1 assertThat( storage.buildMetricFor( @@ -732,12 +723,11 @@ void asynchronous_DeltaAndCumulative() { createMeasurement(2), 0, 35)) - .hasDoubleSum() - .isDelta() - .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(30).hasEpochNanos(35).hasValue(-1)); + .hasDoubleSumSatisfying( + sum -> + sum.isDelta() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(30).hasEpochNanos(35).hasValue(-1))); // Send in new measurement at time 60 for collector 2 assertThat( @@ -750,11 +740,10 @@ void asynchronous_DeltaAndCumulative() { createMeasurement(5), 0, 60)) - .hasDoubleSum() - .isCumulative() 
- .points() - .isNotEmpty() - .satisfiesExactly( - point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(5)); + .hasDoubleSumSatisfying( + sum -> + sum.isCumulative() + .hasPointsSatisfying( + point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(5))); } } diff --git a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramPointAssert.java b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramPointAssert.java index f6ca2217c00..7219b6c1dbb 100644 --- a/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramPointAssert.java +++ b/sdk/testing/src/main/java/io/opentelemetry/sdk/testing/assertj/HistogramPointAssert.java @@ -5,8 +5,12 @@ package io.opentelemetry.sdk.testing.assertj; +import static org.assertj.core.api.Assertions.assertThat; + +import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.HistogramPointData; import java.util.Arrays; +import java.util.function.Consumer; import org.assertj.core.api.Assertions; /** Test assertions for {@link HistogramPointData}. */ @@ -77,4 +81,30 @@ public HistogramPointAssert hasBucketCounts(long... counts) { Assertions.assertThat(actual.getCounts()).as("bucketCounts").containsExactly(bigCounts); return this; } + + /** Asserts the point has the specified exemplars, in any order. */ + public HistogramPointAssert hasExemplars(DoubleExemplarData... exemplars) { + isNotNull(); + Assertions.assertThat(actual.getExemplars()) + .as("exemplars") + .containsExactlyInAnyOrder(exemplars); + return myself; + } + + /** Asserts the point has exemplars matching all of the assertions, in any order. */ + @SafeVarargs + @SuppressWarnings("varargs") + public final HistogramPointAssert hasExemplarsSatisfying( + Consumer<? super DoubleExemplarAssert>... assertions) { + return hasExemplarsSatisfying(Arrays.asList(assertions)); + } + + /** Asserts the point has exemplars matching all of the assertions, in any order. */ + public HistogramPointAssert hasExemplarsSatisfying( + Iterable<? extends Consumer<? super DoubleExemplarAssert>> assertions) { + isNotNull(); + assertThat(actual.getExemplars()) + .satisfiesExactlyInAnyOrder(AssertUtil.toConsumers(assertions, DoubleExemplarAssert::new)); + return myself; + } } diff --git a/sdk/trace/build.gradle.kts b/sdk/trace/build.gradle.kts index 2bcad32b02e..88afa8a2e29 100644 --- a/sdk/trace/build.gradle.kts +++ b/sdk/trace/build.gradle.kts @@ -41,7 +41,6 @@ dependencies { // dependencies. isTransitive = false } - jmh(project(":sdk:metrics-testing")) jmh(project(":exporters:jaeger-thrift")) jmh(project(":exporters:otlp:trace")) { // The opentelemetry-exporter-otlp-trace depends on this project itself. So don't pull in