From a6a4237db3efb1f7ad069de6e16fd2f22dcd0b98 Mon Sep 17 00:00:00 2001 From: David Ashpole Date: Wed, 6 Mar 2024 17:55:05 +0000 Subject: [PATCH 1/6] add fixture with exemplar --- .../testdata/fixtures/metrics/histogram.json | 32 +++++++++++++++++ .../fixtures/metrics/histogram_expect.json | 36 +++++++++++++++++++ .../metrics/histogram_gmp_expect.json | 36 +++++++++++++++++++ 3 files changed, 104 insertions(+) diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json index ab6a67485..50de5ebba 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json @@ -61,6 +61,22 @@ 10, 20, 50 + ], + "exemplars": [ + { + "asInt": 1, + "timeUnixNano": "1649443516286000000", + "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", + "spanId": "84f9e8929bb1cd5b", + "filteredAttributes": [ + { + "key": "filtered.attribute", + "value": { + "stringValue": "foobar" + } + } + ] + } ] }, { @@ -92,6 +108,22 @@ 10, 20, 50 + ], + "exemplars": [ + { + "asDouble": 10.5, + "timeUnixNano": "1649443516286000000", + "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", + "spanId": "84f9e8929bb1cd5b", + "filteredAttributes": [ + { + "key": "filtered.attribute", + "value": { + "stringValue": "foobar" + } + } + ] + } ] } ], diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json index 56a5ad433..8eddcc540 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json @@ -52,6 +52,24 @@ "0", "0", "0" + ], + "exemplars": [ + { + "value": 1, + "timestamp": "2022-04-08T18:45:16.286Z", + "attachments": [ + { + "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", + "spanName": "projects/fakeprojectid/traces/2499d13df7b35ae93dfa3d6ddc01da74/spans/84f9e8929bb1cd5b" + }, + { + "@type": "type.googleapis.com/google.monitoring.v3.DroppedLabels", + "label": { + "filtered_attribute": "foobar" + } + } + ] + } ] } } @@ -108,6 +126,24 @@ "1", "0", "0" + ], + "exemplars": [ + { + "value": 10.5, + "timestamp": "2022-04-08T18:45:16.286Z", + "attachments": [ + { + "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", + "spanName": "projects/fakeprojectid/traces/2499d13df7b35ae93dfa3d6ddc01da74/spans/84f9e8929bb1cd5b" + }, + { + "@type": "type.googleapis.com/google.monitoring.v3.DroppedLabels", + "label": { + "filtered_attribute": "foobar" + } + } + ] + } ] } } diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json index 6e9a6eafb..23289b988 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json @@ -53,6 +53,24 @@ "0", "0", "0" + ], + "exemplars": [ + { + "value": 1, + "timestamp": "2022-04-08T18:45:16.286Z", + "attachments": [ + { + "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", + "spanName": "projects/fakeprojectid/traces/2499d13df7b35ae93dfa3d6ddc01da74/spans/84f9e8929bb1cd5b" + }, + { + "@type": "type.googleapis.com/google.monitoring.v3.DroppedLabels", + "label": { + "filtered_attribute": 
"foobar" + } + } + ] + } ] } } @@ -110,6 +128,24 @@ "1", "0", "0" + ], + "exemplars": [ + { + "value": 10.5, + "timestamp": "2022-04-08T18:45:16.286Z", + "attachments": [ + { + "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", + "spanName": "projects/fakeprojectid/traces/2499d13df7b35ae93dfa3d6ddc01da74/spans/84f9e8929bb1cd5b" + }, + { + "@type": "type.googleapis.com/google.monitoring.v3.DroppedLabels", + "label": { + "filtered_attribute": "foobar" + } + } + ] + } ] } } From e5a7b57958c061cc61c2ab3db15137ba87186670 Mon Sep 17 00:00:00 2001 From: David Ashpole Date: Wed, 6 Mar 2024 20:15:32 +0000 Subject: [PATCH 2/6] implement conversion for histogram exemplars --- .../integrationtest/testcases/conversion.go | 76 +++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/exporter/collector/integrationtest/testcases/conversion.go b/exporter/collector/integrationtest/testcases/conversion.go index c7b0a2924..8e29ef87c 100644 --- a/exporter/collector/integrationtest/testcases/conversion.go +++ b/exporter/collector/integrationtest/testcases/conversion.go @@ -149,6 +149,22 @@ func convertIntDataPoints(pts pmetric.NumberDataPointSlice) []metricdata.DataPoi } func convertHistogram(h pmetric.Histogram) metricdata.Aggregation { + for i := 0; i < h.DataPoints().Len(); i++ { + pt := h.DataPoints().At(i) + for j := 0; j < pt.Exemplars().Len(); j++ { + switch pt.Exemplars().At(j).ValueType() { + case pmetric.ExemplarValueTypeDouble: + return convertFloatHistogram(h) + case pmetric.ExemplarValueTypeInt: + return convertIntHistogram(h) + } + } + } + // The sum is always a float, so default to treating it as a float histogram. + return convertFloatHistogram(h) +} + +func convertFloatHistogram(h pmetric.Histogram) metricdata.Aggregation { if h.DataPoints().Len() == 0 { return nil } @@ -166,11 +182,71 @@ func convertHistogram(h pmetric.Histogram) metricdata.Aggregation { Sum: pt.Sum(), Bounds: pt.ExplicitBounds().AsRaw(), BucketCounts: pt.BucketCounts().AsRaw(), + Exemplars: convertFloatExemplars(pt.Exemplars()), } } return agg } +func convertIntHistogram(h pmetric.Histogram) metricdata.Aggregation { + if h.DataPoints().Len() == 0 { + return nil + } + agg := metricdata.Histogram[int64]{ + Temporality: convertTemporality(h.AggregationTemporality()), + DataPoints: make([]metricdata.HistogramDataPoint[int64], h.DataPoints().Len()), + } + for i := 0; i < h.DataPoints().Len(); i++ { + pt := h.DataPoints().At(i) + agg.DataPoints[i] = metricdata.HistogramDataPoint[int64]{ + Attributes: attribute.NewSet(convertAttributes(pt.Attributes())...), + StartTime: pt.StartTimestamp().AsTime(), + Time: pt.Timestamp().AsTime(), + Count: pt.Count(), + Sum: int64(pt.Sum()), + Bounds: pt.ExplicitBounds().AsRaw(), + BucketCounts: pt.BucketCounts().AsRaw(), + Exemplars: convertIntExemplars(pt.Exemplars()), + } + } + return agg +} + +func convertIntExemplars(es pmetric.ExemplarSlice) []metricdata.Exemplar[int64] { + exemplars := make([]metricdata.Exemplar[int64], es.Len()) + for i := 0; i < es.Len(); i++ { + e := es.At(i) + traceID := e.TraceID() + spanID := e.SpanID() + exemplars[i] = metricdata.Exemplar[int64]{ + FilteredAttributes: convertAttributes(e.FilteredAttributes()), + Time: e.Timestamp().AsTime(), + TraceID: []byte(traceID[:]), + SpanID: []byte(spanID[:]), + Value: e.IntValue(), + } + } + return exemplars +} + +func convertFloatExemplars(es pmetric.ExemplarSlice) []metricdata.Exemplar[float64] { + exemplars := make([]metricdata.Exemplar[float64], es.Len()) + for i := 0; i < es.Len(); i++ { + e := 
es.At(i) + traceID := e.TraceID() + spanID := e.SpanID() + exemplars[i] = metricdata.Exemplar[float64]{ + FilteredAttributes: convertAttributes(e.FilteredAttributes()), + Time: e.Timestamp().AsTime(), + TraceID: []byte(traceID[:]), + SpanID: []byte(spanID[:]), + Value: e.DoubleValue(), + } + } + return exemplars + +} + func convertAttributes(attrs pcommon.Map) []attribute.KeyValue { var kvs []attribute.KeyValue attrs.Range(func(k string, v pcommon.Value) bool { From 5163100e6f335f67e91d8ca53cce555ee4fb25d0 Mon Sep 17 00:00:00 2001 From: David Ashpole Date: Wed, 6 Mar 2024 20:44:29 +0000 Subject: [PATCH 3/6] implement exemplar attributes for histograms in sdk exporter --- .../testdata/fixtures/metrics/histogram.json | 107 ++++++++++- .../fixtures/metrics/histogram_expect.json | 180 +++++++++++++++++- .../metrics/histogram_gmp_expect.json | 158 ++++++++++++++- exporter/metric/metric.go | 72 +++++-- 4 files changed, 488 insertions(+), 29 deletions(-) diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json index 50de5ebba..219a9b96d 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json @@ -28,7 +28,110 @@ "scope": {}, "metrics": [ { - "name": "simple.histogram", + "name": "simple.float64.histogram", + "unit": "s", + "histogram": { + "dataPoints": [ + { + "attributes": [ + { + "key": "some.lemons", + "value": { + "stringValue": "13" + } + } + ], + "startTimeUnixNano": "1649443516286000000", + "timeUnixNano": "1649443516286000000", + "count": "1", + "sum": 2.5, + "bucketCounts": [ + "0", + "0", + "1", + "0", + "0", + "0", + "0" + ], + "explicitBounds": [ + 1, + 2, + 5, + 10, + 20, + 50 + ], + "exemplars": [ + { + "asDouble": 1.1, + "timeUnixNano": "1649443516286000000", + "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", + "spanId": "84f9e8929bb1cd5b", + "filteredAttributes": [ + { + "key": "filtered.attribute", + "value": { + "stringValue": "foobar" + } + } + ] + } + ] + }, + { + "attributes": [ + { + "key": "some.lemons", + "value": { + "stringValue": "10" + } + } + ], + "startTimeUnixNano": "1649443516286000000", + "timeUnixNano": "1649443516286000000", + "count": "2", + "sum": 14.3, + "bucketCounts": [ + "0", + "0", + "1", + "0", + "1", + "0", + "0" + ], + "explicitBounds": [ + 1, + 2, + 5, + 10, + 20, + 50 + ], + "exemplars": [ + { + "asDouble": 10.5, + "timeUnixNano": "1649443516286000000", + "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", + "spanId": "84f9e8929bb1cd5b", + "filteredAttributes": [ + { + "key": "filtered.attribute", + "value": { + "stringValue": "foobar" + } + } + ] + } + ] + } + ], + "aggregationTemporality": "AGGREGATION_TEMPORALITY_CUMULATIVE" + } + }, + { + "name": "simple.int64.histogram", "unit": "s", "histogram": { "dataPoints": [ @@ -111,7 +214,7 @@ ], "exemplars": [ { - "asDouble": 10.5, + "asInt": 3, "timeUnixNano": "1649443516286000000", "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", "spanId": "84f9e8929bb1cd5b", diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json index 8eddcc540..ba0026898 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json @@ -5,7 +5,155 @@ "timeSeries": [ { "metric": 
{ - "type": "workload.googleapis.com/simple.histogram", + "type": "workload.googleapis.com/simple.float64.histogram", + "labels": { + "some_lemons": "13" + } + }, + "resource": { + "type": "generic_task", + "labels": { + "job": "demo", + "location": "us-central1-c", + "namespace": "", + "task_id": "10.92.5.2:15692" + } + }, + "metricKind": "CUMULATIVE", + "valueType": "DISTRIBUTION", + "points": [ + { + "interval": { + "endTime": "1970-01-01T00:00:00Z", + "startTime": "1970-01-01T00:00:00Z" + }, + "value": { + "distributionValue": { + "count": "1", + "mean": 2.5, + "sumOfSquaredDeviation": 1, + "bucketOptions": { + "explicitBuckets": { + "bounds": [ + 1, + 2, + 5, + 10, + 20, + 50 + ] + } + }, + "bucketCounts": [ + "0", + "0", + "1", + "0", + "0", + "0", + "0" + ], + "exemplars": [ + { + "value": 1.1, + "timestamp": "2022-04-08T18:45:16.286Z", + "attachments": [ + { + "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", + "spanName": "projects/fakeprojectid/traces/2499d13df7b35ae93dfa3d6ddc01da74/spans/84f9e8929bb1cd5b" + }, + { + "@type": "type.googleapis.com/google.monitoring.v3.DroppedLabels", + "label": { + "filtered_attribute": "foobar" + } + } + ] + } + ] + } + } + } + ], + "unit": "s" + }, + { + "metric": { + "type": "workload.googleapis.com/simple.float64.histogram", + "labels": { + "some_lemons": "10" + } + }, + "resource": { + "type": "generic_task", + "labels": { + "job": "demo", + "location": "us-central1-c", + "namespace": "", + "task_id": "10.92.5.2:15692" + } + }, + "metricKind": "CUMULATIVE", + "valueType": "DISTRIBUTION", + "points": [ + { + "interval": { + "endTime": "1970-01-01T00:00:00Z", + "startTime": "1970-01-01T00:00:00Z" + }, + "value": { + "distributionValue": { + "count": "2", + "mean": 7.15, + "sumOfSquaredDeviation": 74.945, + "bucketOptions": { + "explicitBuckets": { + "bounds": [ + 1, + 2, + 5, + 10, + 20, + 50 + ] + } + }, + "bucketCounts": [ + "0", + "0", + "1", + "0", + "1", + "0", + "0" + ], + "exemplars": [ + { + "value": 10.5, + "timestamp": "2022-04-08T18:45:16.286Z", + "attachments": [ + { + "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", + "spanName": "projects/fakeprojectid/traces/2499d13df7b35ae93dfa3d6ddc01da74/spans/84f9e8929bb1cd5b" + }, + { + "@type": "type.googleapis.com/google.monitoring.v3.DroppedLabels", + "label": { + "filtered_attribute": "foobar" + } + } + ] + } + ] + } + } + } + ], + "unit": "s" + }, + { + "metric": { + "type": "workload.googleapis.com/simple.int64.histogram", "labels": { "some_lemons": "13" } @@ -79,7 +227,7 @@ }, { "metric": { - "type": "workload.googleapis.com/simple.histogram", + "type": "workload.googleapis.com/simple.int64.histogram", "labels": { "some_lemons": "10" } @@ -129,7 +277,7 @@ ], "exemplars": [ { - "value": 10.5, + "value": 3, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { @@ -158,8 +306,8 @@ { "name": "projects/fakeprojectid", "metricDescriptor": { - "name": "simple.histogram", - "type": "workload.googleapis.com/simple.histogram", + "name": "simple.float64.histogram", + "type": "workload.googleapis.com/simple.float64.histogram", "labels": [ { "key": "some_lemons" @@ -168,7 +316,23 @@ "metricKind": "CUMULATIVE", "valueType": "DISTRIBUTION", "unit": "s", - "displayName": "simple.histogram" + "displayName": "simple.float64.histogram" + } + }, + { + "name": "projects/fakeprojectid", + "metricDescriptor": { + "name": "simple.int64.histogram", + "type": "workload.googleapis.com/simple.int64.histogram", + "labels": [ + { + "key": "some_lemons" + } + ], + "metricKind": 
"CUMULATIVE", + "valueType": "DISTRIBUTION", + "unit": "s", + "displayName": "simple.int64.histogram" } } ], @@ -194,7 +358,7 @@ "startTime": "1970-01-01T00:00:00Z" }, "value": { - "int64Value": "2" + "int64Value": "4" } } ] @@ -217,7 +381,7 @@ "startTime": "1970-01-01T00:00:00Z" }, "value": { - "int64Value": "1" + "int64Value": "2" } } ] diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json index 23289b988..b07eb3b55 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json @@ -5,7 +5,157 @@ "timeSeries": [ { "metric": { - "type": "prometheus.googleapis.com/simple_histogram_seconds/histogram", + "type": "prometheus.googleapis.com/simple_float64_histogram_seconds/histogram", + "labels": { + "some_lemons": "13" + } + }, + "resource": { + "type": "prometheus_target", + "labels": { + "cluster": "", + "instance": "10.92.5.2:15692", + "job": "demo", + "location": "us-central1-c", + "namespace": "" + } + }, + "metricKind": "CUMULATIVE", + "valueType": "DISTRIBUTION", + "points": [ + { + "interval": { + "endTime": "1970-01-01T00:00:00Z", + "startTime": "1970-01-01T00:00:00Z" + }, + "value": { + "distributionValue": { + "count": "1", + "mean": 2.5, + "sumOfSquaredDeviation": 1, + "bucketOptions": { + "explicitBuckets": { + "bounds": [ + 1, + 2, + 5, + 10, + 20, + 50 + ] + } + }, + "bucketCounts": [ + "0", + "0", + "1", + "0", + "0", + "0", + "0" + ], + "exemplars": [ + { + "value": 1.1, + "timestamp": "2022-04-08T18:45:16.286Z", + "attachments": [ + { + "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", + "spanName": "projects/fakeprojectid/traces/2499d13df7b35ae93dfa3d6ddc01da74/spans/84f9e8929bb1cd5b" + }, + { + "@type": "type.googleapis.com/google.monitoring.v3.DroppedLabels", + "label": { + "filtered_attribute": "foobar" + } + } + ] + } + ] + } + } + } + ], + "unit": "s" + }, + { + "metric": { + "type": "prometheus.googleapis.com/simple_float64_histogram_seconds/histogram", + "labels": { + "some_lemons": "10" + } + }, + "resource": { + "type": "prometheus_target", + "labels": { + "cluster": "", + "instance": "10.92.5.2:15692", + "job": "demo", + "location": "us-central1-c", + "namespace": "" + } + }, + "metricKind": "CUMULATIVE", + "valueType": "DISTRIBUTION", + "points": [ + { + "interval": { + "endTime": "1970-01-01T00:00:00Z", + "startTime": "1970-01-01T00:00:00Z" + }, + "value": { + "distributionValue": { + "count": "2", + "mean": 7.15, + "sumOfSquaredDeviation": 74.945, + "bucketOptions": { + "explicitBuckets": { + "bounds": [ + 1, + 2, + 5, + 10, + 20, + 50 + ] + } + }, + "bucketCounts": [ + "0", + "0", + "1", + "0", + "1", + "0", + "0" + ], + "exemplars": [ + { + "value": 10.5, + "timestamp": "2022-04-08T18:45:16.286Z", + "attachments": [ + { + "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", + "spanName": "projects/fakeprojectid/traces/2499d13df7b35ae93dfa3d6ddc01da74/spans/84f9e8929bb1cd5b" + }, + { + "@type": "type.googleapis.com/google.monitoring.v3.DroppedLabels", + "label": { + "filtered_attribute": "foobar" + } + } + ] + } + ] + } + } + } + ], + "unit": "s" + }, + { + "metric": { + "type": "prometheus.googleapis.com/simple_int64_histogram_seconds/histogram", "labels": { "some_lemons": "13" } @@ -80,7 +230,7 @@ }, { "metric": { - "type": 
"prometheus.googleapis.com/simple_histogram_seconds/histogram", + "type": "prometheus.googleapis.com/simple_int64_histogram_seconds/histogram", "labels": { "some_lemons": "10" } @@ -131,7 +281,7 @@ ], "exemplars": [ { - "value": 10.5, + "value": 3, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { @@ -205,7 +355,7 @@ "startTime": "1970-01-01T00:00:00Z" }, "value": { - "int64Value": "3" + "int64Value": "5" } } ] diff --git a/exporter/metric/metric.go b/exporter/metric/metric.go index f62507221..06b8a8b27 100644 --- a/exporter/metric/metric.go +++ b/exporter/metric/metric.go @@ -15,7 +15,9 @@ package metric import ( + "bytes" "context" + "encoding/hex" "errors" "fmt" "math" @@ -32,6 +34,7 @@ import ( "go.opentelemetry.io/otel/sdk/metric" "go.opentelemetry.io/otel/sdk/metric/metricdata" "go.opentelemetry.io/otel/sdk/resource" + "go.opentelemetry.io/otel/trace" monitoring "cloud.google.com/go/monitoring/apiv3/v2" "cloud.google.com/go/monitoring/apiv3/v2/monitoringpb" @@ -44,6 +47,7 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/encoding/gzip" "google.golang.org/grpc/metadata" + "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/timestamppb" "github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping" @@ -488,7 +492,7 @@ func (me *metricExporter) recordToTspb(m metricdata.Metrics, mr *monitoredrespb. } case metricdata.Histogram[int64]: for _, point := range a.DataPoints { - ts, err := histogramToTimeSeries(point, m, mr, me.o.enableSumOfSquaredDeviation) + ts, err := histogramToTimeSeries(point, m, mr, me.o.enableSumOfSquaredDeviation, me.o.projectID) if err != nil { errs = append(errs, err) continue @@ -498,7 +502,7 @@ func (me *metricExporter) recordToTspb(m metricdata.Metrics, mr *monitoredrespb. } case metricdata.Histogram[float64]: for _, point := range a.DataPoints { - ts, err := histogramToTimeSeries(point, m, mr, me.o.enableSumOfSquaredDeviation) + ts, err := histogramToTimeSeries(point, m, mr, me.o.enableSumOfSquaredDeviation, me.o.projectID) if err != nil { errs = append(errs, err) continue @@ -508,7 +512,7 @@ func (me *metricExporter) recordToTspb(m metricdata.Metrics, mr *monitoredrespb. } case metricdata.ExponentialHistogram[int64]: for _, point := range a.DataPoints { - ts, err := expHistogramToTimeSeries(point, m, mr, me.o.enableSumOfSquaredDeviation) + ts, err := expHistogramToTimeSeries(point, m, mr, me.o.enableSumOfSquaredDeviation, me.o.projectID) if err != nil { errs = append(errs, err) continue @@ -518,7 +522,7 @@ func (me *metricExporter) recordToTspb(m metricdata.Metrics, mr *monitoredrespb. } case metricdata.ExponentialHistogram[float64]: for _, point := range a.DataPoints { - ts, err := expHistogramToTimeSeries(point, m, mr, me.o.enableSumOfSquaredDeviation) + ts, err := expHistogramToTimeSeries(point, m, mr, me.o.enableSumOfSquaredDeviation, me.o.projectID) if err != nil { errs = append(errs, err) continue @@ -596,12 +600,12 @@ func sumToTimeSeries[N int64 | float64](point metricdata.DataPoint[N], metrics m // TODO(@dashpole): Refactor to pass control-coupling lint check. 
// //nolint:revive -func histogramToTimeSeries[N int64 | float64](point metricdata.HistogramDataPoint[N], metrics metricdata.Metrics, mr *monitoredrespb.MonitoredResource, enableSOSD bool) (*monitoringpb.TimeSeries, error) { +func histogramToTimeSeries[N int64 | float64](point metricdata.HistogramDataPoint[N], metrics metricdata.Metrics, mr *monitoredrespb.MonitoredResource, enableSOSD bool, projectID string) (*monitoringpb.TimeSeries, error) { interval, err := toNonemptyTimeIntervalpb(point.StartTime, point.Time) if err != nil { return nil, err } - distributionValue := histToDistribution(point) + distributionValue := histToDistribution(point, projectID) if enableSOSD { setSumOfSquaredDeviation(point, distributionValue) } @@ -621,12 +625,12 @@ func histogramToTimeSeries[N int64 | float64](point metricdata.HistogramDataPoin }, nil } -func expHistogramToTimeSeries[N int64 | float64](point metricdata.ExponentialHistogramDataPoint[N], metrics metricdata.Metrics, mr *monitoredrespb.MonitoredResource, enableSOSD bool) (*monitoringpb.TimeSeries, error) { +func expHistogramToTimeSeries[N int64 | float64](point metricdata.ExponentialHistogramDataPoint[N], metrics metricdata.Metrics, mr *monitoredrespb.MonitoredResource, enableSOSD bool, projectID string) (*monitoringpb.TimeSeries, error) { interval, err := toNonemptyTimeIntervalpb(point.StartTime, point.Time) if err != nil { return nil, err } - distributionValue := expHistToDistribution(point) + distributionValue := expHistToDistribution(point, projectID) // TODO: Implement "setSumOfSquaredDeviationExpHist" for parameter "enableSOSD" functionality. return &monitoringpb.TimeSeries{ Resource: mr, @@ -667,7 +671,7 @@ func toNonemptyTimeIntervalpb(start, end time.Time) (*monitoringpb.TimeInterval, }, nil } -func histToDistribution[N int64 | float64](hist metricdata.HistogramDataPoint[N]) *distribution.Distribution { +func histToDistribution[N int64 | float64](hist metricdata.HistogramDataPoint[N], projectID string) *distribution.Distribution { counts := make([]int64, len(hist.BucketCounts)) for i, v := range hist.BucketCounts { counts[i] = int64(v) @@ -687,11 +691,11 @@ func histToDistribution[N int64 | float64](hist metricdata.HistogramDataPoint[N] }, }, }, - Exemplars: toDistributionExemplar[N](hist.Exemplars), + Exemplars: toDistributionExemplar[N](hist.Exemplars, projectID), } } -func expHistToDistribution[N int64 | float64](hist metricdata.ExponentialHistogramDataPoint[N]) *distribution.Distribution { +func expHistToDistribution[N int64 | float64](hist metricdata.ExponentialHistogramDataPoint[N], projectID string) *distribution.Distribution { // First calculate underflow bucket with all negatives + zeros. underflow := hist.ZeroCount negativeBuckets := hist.NegativeBucket.Counts @@ -741,15 +745,36 @@ func expHistToDistribution[N int64 | float64](hist metricdata.ExponentialHistogr Mean: mean, BucketCounts: counts, BucketOptions: bucketOptions, - Exemplars: toDistributionExemplar[N](hist.Exemplars), + Exemplars: toDistributionExemplar[N](hist.Exemplars, projectID), } } -func toDistributionExemplar[N int64 | float64](Exemplars []metricdata.Exemplar[N]) []*distribution.Distribution_Exemplar { +func toDistributionExemplar[N int64 | float64](Exemplars []metricdata.Exemplar[N], projectID string) []*distribution.Distribution_Exemplar { var exemplars []*distribution.Distribution_Exemplar for _, e := range Exemplars { - // TODO: Add context []attachments. 
See https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TypedValue#exemplar - exemplars = append(exemplars, &distribution.Distribution_Exemplar{Value: float64(e.Value), Timestamp: timestamppb.New(e.Time)}) + attachments := []*anypb.Any{} + if hasValidSpanContext(e) { + traceID, spanID := e.TraceID, e.SpanID + sctx, err := anypb.New(&monitoringpb.SpanContext{ + SpanName: fmt.Sprintf("projects/%s/traces/%s/spans/%s", projectID, hex.EncodeToString(traceID[:]), hex.EncodeToString(spanID[:])), + }) + if err == nil { + attachments = append(attachments, sctx) + } + } + if len(e.FilteredAttributes) > 0 { + attr, err := anypb.New(&monitoringpb.DroppedLabels{ + Label: attributesToLabels(e.FilteredAttributes), + }) + if err == nil { + attachments = append(attachments, attr) + } + } + exemplars = append(exemplars, &distribution.Distribution_Exemplar{ + Value: float64(e.Value), + Timestamp: timestamppb.New(e.Time), + Attachments: attachments, + }) } sort.Slice(exemplars, func(i, j int) bool { return exemplars[i].Value < exemplars[j].Value @@ -757,6 +782,23 @@ func toDistributionExemplar[N int64 | float64](Exemplars []metricdata.Exemplar[N return exemplars } +func attributesToLabels(attrs []attribute.KeyValue) map[string]string { + labels := make(map[string]string, len(attrs)) + for _, attr := range attrs { + labels[normalizeLabelKey(string(attr.Key))] = sanitizeUTF8(attr.Value.Emit()) + } + return labels +} + +var ( + nilTraceID trace.TraceID + nilSpanID trace.SpanID +) + +func hasValidSpanContext[N int64 | float64](e metricdata.Exemplar[N]) bool { + return !bytes.Equal(e.TraceID[:], nilTraceID[:]) && !bytes.Equal(e.SpanID[:], nilSpanID[:]) +} + func setSumOfSquaredDeviation[N int64 | float64](hist metricdata.HistogramDataPoint[N], dist *distribution.Distribution) { var prevBound float64 // Calculate the sum of squared deviation. 
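
For readers unfamiliar with the Cloud Monitoring exemplar attachment types used in the patch above, here is a standalone sketch (not part of the patch series) of how an OTel exemplar's span context and filtered attributes map onto SpanContext and DroppedLabels attachments. The project ID, trace ID, span ID, and label values are the placeholder values from the test fixtures; the proto packages are the ones imported in metric.go above.

package main

import (
	"fmt"
	"time"

	"cloud.google.com/go/monitoring/apiv3/v2/monitoringpb"
	"google.golang.org/genproto/googleapis/api/distribution"
	"google.golang.org/protobuf/types/known/anypb"
	"google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	// Placeholder identifiers matching the test fixtures.
	projectID := "fakeprojectid"
	traceID := "2499d13df7b35ae93dfa3d6ddc01da74"
	spanID := "84f9e8929bb1cd5b"

	// SpanContext attachment: links the exemplar back to the span that recorded it.
	sctx, err := anypb.New(&monitoringpb.SpanContext{
		SpanName: fmt.Sprintf("projects/%s/traces/%s/spans/%s", projectID, traceID, spanID),
	})
	if err != nil {
		panic(err)
	}

	// DroppedLabels attachment: carries attributes the exemplar reservoir filtered out.
	dropped, err := anypb.New(&monitoringpb.DroppedLabels{
		Label: map[string]string{"filtered_attribute": "foobar"},
	})
	if err != nil {
		panic(err)
	}

	exemplar := &distribution.Distribution_Exemplar{
		Value:       10.5,
		Timestamp:   timestamppb.New(time.Unix(0, 1649443516286000000)),
		Attachments: []*anypb.Any{sctx, dropped},
	}
	fmt.Println(exemplar)
}

In the exporter itself the trace and span strings are derived from the exemplar's raw TraceID and SpanID bytes via hex.EncodeToString, as shown in toDistributionExemplar above.
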
From 5c4603efed8d0e8ddebd066568598bcd887c7f66 Mon Sep 17 00:00:00 2001 From: David Ashpole Date: Wed, 6 Mar 2024 21:10:26 +0000 Subject: [PATCH 4/6] small fixes --- .../collector/integrationtest/testcases/conversion.go | 1 - .../testdata/fixtures/metrics/histogram.json | 8 ++++---- .../testdata/fixtures/metrics/histogram_expect.json | 8 ++++---- .../testdata/fixtures/metrics/histogram_gmp_expect.json | 8 ++++---- exporter/metric/go.mod | 6 ++++-- 5 files changed, 16 insertions(+), 15 deletions(-) diff --git a/exporter/collector/integrationtest/testcases/conversion.go b/exporter/collector/integrationtest/testcases/conversion.go index 8e29ef87c..6beaff31d 100644 --- a/exporter/collector/integrationtest/testcases/conversion.go +++ b/exporter/collector/integrationtest/testcases/conversion.go @@ -244,7 +244,6 @@ func convertFloatExemplars(es pmetric.ExemplarSlice) []metricdata.Exemplar[float } } return exemplars - } func convertAttributes(attrs pcommon.Map) []attribute.KeyValue { diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json index 219a9b96d..cad026144 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram.json @@ -64,7 +64,7 @@ ], "exemplars": [ { - "asDouble": 1.1, + "asDouble": 4.1, "timeUnixNano": "1649443516286000000", "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", "spanId": "84f9e8929bb1cd5b", @@ -111,7 +111,7 @@ ], "exemplars": [ { - "asDouble": 10.5, + "asDouble": 15.5, "timeUnixNano": "1649443516286000000", "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", "spanId": "84f9e8929bb1cd5b", @@ -167,7 +167,7 @@ ], "exemplars": [ { - "asInt": 1, + "asInt": 4, "timeUnixNano": "1649443516286000000", "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", "spanId": "84f9e8929bb1cd5b", @@ -214,7 +214,7 @@ ], "exemplars": [ { - "asInt": 3, + "asInt": 12, "timeUnixNano": "1649443516286000000", "traceId": "2499d13df7b35ae93dfa3d6ddc01da74", "spanId": "84f9e8929bb1cd5b", diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json index ba0026898..a424ab9fc 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json @@ -55,7 +55,7 @@ ], "exemplars": [ { - "value": 1.1, + "value": 4.1, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { @@ -129,7 +129,7 @@ ], "exemplars": [ { - "value": 10.5, + "value": 15.5, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { @@ -203,7 +203,7 @@ ], "exemplars": [ { - "value": 1, + "value": 4, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { @@ -277,7 +277,7 @@ ], "exemplars": [ { - "value": 3, + "value": 12, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json index b07eb3b55..be7448098 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json @@ -56,7 +56,7 @@ ], "exemplars": [ { - "value": 1.1, + "value": 4.1, "timestamp": "2022-04-08T18:45:16.286Z", 
"attachments": [ { @@ -131,7 +131,7 @@ ], "exemplars": [ { - "value": 10.5, + "value": 15.5, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { @@ -206,7 +206,7 @@ ], "exemplars": [ { - "value": 1, + "value": 4, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { @@ -281,7 +281,7 @@ ], "exemplars": [ { - "value": 3, + "value": 12, "timestamp": "2022-04-08T18:45:16.286Z", "attachments": [ { diff --git a/exporter/metric/go.mod b/exporter/metric/go.mod index 710aa3791..feef952f0 100644 --- a/exporter/metric/go.mod +++ b/exporter/metric/go.mod @@ -21,7 +21,10 @@ require ( google.golang.org/protobuf v1.33.0 ) -require google.golang.org/genproto/googleapis/api v0.0.0-20230726155614-23370e0ffb3e +require ( + go.opentelemetry.io/otel/trace v1.23.1 + google.golang.org/genproto/googleapis/api v0.0.0-20230726155614-23370e0ffb3e +) require ( cloud.google.com/go/compute v1.23.0 // indirect @@ -41,7 +44,6 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rogpeppe/go-internal v1.10.0 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/otel/trace v1.23.1 // indirect golang.org/x/crypto v0.17.0 // indirect golang.org/x/sync v0.3.0 // indirect golang.org/x/text v0.14.0 // indirect From 8c44470fd60e47b2528d105ec8c04f8dfbe5d35d Mon Sep 17 00:00:00 2001 From: David Ashpole Date: Thu, 7 Mar 2024 14:49:14 +0000 Subject: [PATCH 5/6] normalize timestamps for exemplars --- .../integrationtest/testcases/testcase.go | 30 ++++++++++++++++--- .../fixtures/metrics/histogram_expect.json | 8 ++--- .../metrics/histogram_gmp_expect.json | 8 ++--- 3 files changed, 34 insertions(+), 12 deletions(-) diff --git a/exporter/collector/integrationtest/testcases/testcase.go b/exporter/collector/integrationtest/testcases/testcase.go index 538967420..544d5c388 100644 --- a/exporter/collector/integrationtest/testcases/testcase.go +++ b/exporter/collector/integrationtest/testcases/testcase.go @@ -26,6 +26,7 @@ import ( "cloud.google.com/go/monitoring/apiv3/v2/monitoringpb" "go.opentelemetry.io/collector/pdata/ptrace" distributionpb "google.golang.org/genproto/googleapis/api/distribution" + googlemetricpb "google.golang.org/genproto/googleapis/api/metric" "github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/collector/integrationtest/protos" "github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric" @@ -291,6 +292,10 @@ func (tc *TestCase) LoadOTLPMetricsInput( SetStartTimestamp(pcommon.Timestamp) SetTimestamp(pcommon.Timestamp) } + type pointWithExemplars interface { + point + Exemplars() pmetric.ExemplarSlice + } updatePoint := func(p point) { if p.StartTimestamp() != 0 { p.SetStartTimestamp(pcommon.NewTimestampFromTime(startTime)) @@ -299,6 +304,13 @@ func (tc *TestCase) LoadOTLPMetricsInput( p.SetTimestamp(pcommon.NewTimestampFromTime(endTime)) } } + updatePointWithExemplars := func(p pointWithExemplars) { + updatePoint(p) + for i := 0; i < p.Exemplars().Len(); i++ { + p.Exemplars().At(i).SetTimestamp(pcommon.NewTimestampFromTime(endTime)) + } + + } for i := 0; i < metrics.ResourceMetrics().Len(); i++ { rm := metrics.ResourceMetrics().At(i) @@ -310,15 +322,15 @@ func (tc *TestCase) LoadOTLPMetricsInput( switch m.Type() { case pmetric.MetricTypeGauge: for i := 0; i < m.Gauge().DataPoints().Len(); i++ { - updatePoint(m.Gauge().DataPoints().At(i)) + updatePointWithExemplars(m.Gauge().DataPoints().At(i)) } case pmetric.MetricTypeSum: for i := 0; i < m.Sum().DataPoints().Len(); i++ { - updatePoint(m.Sum().DataPoints().At(i)) + 
updatePointWithExemplars(m.Sum().DataPoints().At(i)) } case pmetric.MetricTypeHistogram: for i := 0; i < m.Histogram().DataPoints().Len(); i++ { - updatePoint(m.Histogram().DataPoints().At(i)) + updatePointWithExemplars(m.Histogram().DataPoints().At(i)) } case pmetric.MetricTypeSummary: for i := 0; i < m.Summary().DataPoints().Len(); i++ { @@ -326,7 +338,7 @@ func (tc *TestCase) LoadOTLPMetricsInput( } case pmetric.MetricTypeExponentialHistogram: for i := 0; i < m.ExponentialHistogram().DataPoints().Len(); i++ { - updatePoint(m.ExponentialHistogram().DataPoints().At(i)) + updatePointWithExemplars(m.ExponentialHistogram().DataPoints().At(i)) } } } @@ -370,6 +382,11 @@ func (tc *TestCase) updateMetricExpectFixture( if p.GetInterval().GetEndTime() != nil { p.GetInterval().EndTime = timestamppb.New(endTime) } + if ts.GetValueType() == googlemetricpb.MetricDescriptor_DISTRIBUTION { + for _, ex := range p.GetValue().GetDistributionValue().GetExemplars() { + ex.Timestamp = timestamppb.New(endTime) + } + } } } } @@ -423,6 +440,11 @@ func normalizeTimeSeriesReqs(t testing.TB, reqs ...*monitoringpb.CreateTimeSerie if p.GetInterval().GetEndTime() != nil { p.GetInterval().EndTime = ×tamppb.Timestamp{} } + if ts.GetValueType() == googlemetricpb.MetricDescriptor_DISTRIBUTION { + for _, ex := range p.GetValue().GetDistributionValue().GetExemplars() { + ex.Timestamp = ×tamppb.Timestamp{} + } + } } // clear project ID from monitored resource diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json index a424ab9fc..ea76e95a2 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_expect.json @@ -56,7 +56,7 @@ "exemplars": [ { "value": 4.1, - "timestamp": "2022-04-08T18:45:16.286Z", + "timestamp": "1970-01-01T00:00:00Z", "attachments": [ { "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", @@ -130,7 +130,7 @@ "exemplars": [ { "value": 15.5, - "timestamp": "2022-04-08T18:45:16.286Z", + "timestamp": "1970-01-01T00:00:00Z", "attachments": [ { "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", @@ -204,7 +204,7 @@ "exemplars": [ { "value": 4, - "timestamp": "2022-04-08T18:45:16.286Z", + "timestamp": "1970-01-01T00:00:00Z", "attachments": [ { "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", @@ -278,7 +278,7 @@ "exemplars": [ { "value": 12, - "timestamp": "2022-04-08T18:45:16.286Z", + "timestamp": "1970-01-01T00:00:00Z", "attachments": [ { "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", diff --git a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json index be7448098..12bd35ef1 100644 --- a/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json +++ b/exporter/collector/integrationtest/testdata/fixtures/metrics/histogram_gmp_expect.json @@ -57,7 +57,7 @@ "exemplars": [ { "value": 4.1, - "timestamp": "2022-04-08T18:45:16.286Z", + "timestamp": "1970-01-01T00:00:00Z", "attachments": [ { "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", @@ -132,7 +132,7 @@ "exemplars": [ { "value": 15.5, - "timestamp": "2022-04-08T18:45:16.286Z", + "timestamp": "1970-01-01T00:00:00Z", "attachments": [ { "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", @@ 
-207,7 +207,7 @@ "exemplars": [ { "value": 4, - "timestamp": "2022-04-08T18:45:16.286Z", + "timestamp": "1970-01-01T00:00:00Z", "attachments": [ { "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", @@ -282,7 +282,7 @@ "exemplars": [ { "value": 12, - "timestamp": "2022-04-08T18:45:16.286Z", + "timestamp": "1970-01-01T00:00:00Z", "attachments": [ { "@type": "type.googleapis.com/google.monitoring.v3.SpanContext", From f19eae662d89e429242624959ff4ae026fcfab70 Mon Sep 17 00:00:00 2001 From: David Ashpole Date: Thu, 7 Mar 2024 14:52:17 +0000 Subject: [PATCH 6/6] address feedback --- exporter/metric/metric.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/exporter/metric/metric.go b/exporter/metric/metric.go index 06b8a8b27..9b969f0a0 100644 --- a/exporter/metric/metric.go +++ b/exporter/metric/metric.go @@ -754,9 +754,8 @@ func toDistributionExemplar[N int64 | float64](Exemplars []metricdata.Exemplar[N for _, e := range Exemplars { attachments := []*anypb.Any{} if hasValidSpanContext(e) { - traceID, spanID := e.TraceID, e.SpanID sctx, err := anypb.New(&monitoringpb.SpanContext{ - SpanName: fmt.Sprintf("projects/%s/traces/%s/spans/%s", projectID, hex.EncodeToString(traceID[:]), hex.EncodeToString(spanID[:])), + SpanName: fmt.Sprintf("projects/%s/traces/%s/spans/%s", projectID, hex.EncodeToString(e.TraceID[:]), hex.EncodeToString(e.SpanID[:])), }) if err == nil { attachments = append(attachments, sctx)
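
Taken together, the series teaches both the SDK metric exporter (exporter/metric) and the integration-test fixture conversion (exporter/collector/integrationtest/testcases) to carry histogram exemplars through to Cloud Monitoring distributions. For reference, the sketch below hand-builds roughly the SDK-side input these code paths consume — a metricdata.HistogramDataPoint mirroring the second data point in the fixture, with an arbitrary timestamp. It is illustrative only and not part of the patches.

package main

import (
	"fmt"
	"time"

	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/sdk/metric/metricdata"
)

func main() {
	now := time.Now()
	// Raw trace/span ID bytes corresponding to the hex strings in the fixtures.
	traceID := []byte{0x24, 0x99, 0xd1, 0x3d, 0xf7, 0xb3, 0x5a, 0xe9, 0x3d, 0xfa, 0x3d, 0x6d, 0xdc, 0x01, 0xda, 0x74}
	spanID := []byte{0x84, 0xf9, 0xe8, 0x92, 0x9b, 0xb1, 0xcd, 0x5b}

	dp := metricdata.HistogramDataPoint[float64]{
		Attributes:   attribute.NewSet(attribute.String("some.lemons", "10")),
		StartTime:    now,
		Time:         now,
		Count:        2,
		Sum:          14.3,
		Bounds:       []float64{1, 2, 5, 10, 20, 50},
		BucketCounts: []uint64{0, 0, 1, 0, 1, 0, 0},
		Exemplars: []metricdata.Exemplar[float64]{{
			FilteredAttributes: []attribute.KeyValue{attribute.String("filtered.attribute", "foobar")},
			Time:               now,
			Value:              10.5,
			TraceID:            traceID,
			SpanID:             spanID,
		}},
	}
	fmt.Printf("%+v\n", dp)
}
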