From 6e251484960ff2d2c46b2db0f0f74b36f9b33d0b Mon Sep 17 00:00:00 2001
From: Chao
Date: Sat, 15 May 2021 00:29:50 +0800
Subject: [PATCH] Remove internal protos usage in Prometheusremotewrite exporter

Migrate the exporter, its helpers, and the tests from the internal
protogen OTLP structs to the public pdata API (pdata.Metrics,
pdata.Resource, pdata.Metric, and the typed data point slices).
---
 .../prometheusremotewriteexporter/exporter.go |  126 +--
 .../exporter_test.go                          |  300 ++-----
 .../prometheusremotewriteexporter/helper.go   |  214 ++---
 .../helper_test.go                            |   86 +-
 .../testutil_test.go                          |  751 +++++++-----------
 5 files changed, 532 insertions(+), 945 deletions(-)

diff --git a/exporter/prometheusremotewriteexporter/exporter.go b/exporter/prometheusremotewriteexporter/exporter.go
index bb331e05db8..04947215c0c 100644
--- a/exporter/prometheusremotewriteexporter/exporter.go
+++ b/exporter/prometheusremotewriteexporter/exporter.go
@@ -35,9 +35,6 @@ import (
 	"go.opentelemetry.io/collector/component"
 	"go.opentelemetry.io/collector/consumer/consumererror"
 	"go.opentelemetry.io/collector/consumer/pdata"
-	"go.opentelemetry.io/collector/internal"
-	otlp "go.opentelemetry.io/collector/internal/data/protogen/metrics/v1"
-	resourcev1 "go.opentelemetry.io/collector/internal/data/protogen/resource/v1"
 )
 
 const (
@@ -108,24 +105,20 @@ func (prwe *PrwExporter) PushMetrics(ctx context.Context, md pdata.Metrics) erro
 	tsMap := map[string]*prompb.TimeSeries{}
 	dropped := 0
 	var errs []error
-	resourceMetrics := internal.MetricsToOtlp(md.InternalRep()).ResourceMetrics
-	for _, resourceMetric := range resourceMetrics {
-		if resourceMetric == nil {
-			continue
-		}
-
-		resource := resourceMetric.Resource
+	resourceMetricsSlice := md.ResourceMetrics()
+	for i := 0; i < resourceMetricsSlice.Len(); i++ {
+		resourceMetrics := resourceMetricsSlice.At(i)
+		resource := resourceMetrics.Resource()
+		instrumentationLibraryMetricsSlice := resourceMetrics.InstrumentationLibraryMetrics()
 		// TODO: add resource attributes as labels, probably in next PR
-		for _, instrumentationMetrics := range resourceMetric.InstrumentationLibraryMetrics {
-			if instrumentationMetrics == nil {
-				continue
-			}
+		for j := 0; j < instrumentationLibraryMetricsSlice.Len(); j++ {
+			instrumentationLibraryMetrics := instrumentationLibraryMetricsSlice.At(j)
+			metricSlice := instrumentationLibraryMetrics.Metrics()
+
 			// TODO: decide if instrumentation library information should be exported as labels
-			for _, metric := range instrumentationMetrics.Metrics {
-				if metric == nil {
-					dropped++
-					continue
-				}
+			for k := 0; k < metricSlice.Len(); k++ {
+				metric := metricSlice.At(k)
+
 				// check for valid type and temporality combination and for matching data field and type
 				if ok := validateMetrics(metric); !ok {
 					dropped++
@@ -134,18 +127,18 @@ func (prwe *PrwExporter) PushMetrics(ctx context.Context, md pdata.Metrics) erro
 				}
 				// handle individual metric based on type
-				switch metric.Data.(type) {
-				case *otlp.Metric_DoubleSum, *otlp.Metric_IntSum, *otlp.Metric_DoubleGauge, *otlp.Metric_IntGauge:
+				switch metric.DataType() {
+				case pdata.MetricDataTypeDoubleSum, pdata.MetricDataTypeIntSum, pdata.MetricDataTypeDoubleGauge, pdata.MetricDataTypeIntGauge:
 					if err := prwe.handleScalarMetric(tsMap, resource, metric); err != nil {
 						dropped++
 						errs = append(errs, consumererror.Permanent(err))
 					}
-				case *otlp.Metric_DoubleHistogram, *otlp.Metric_IntHistogram:
+				case pdata.MetricDataTypeHistogram, pdata.MetricDataTypeIntHistogram:
 					if err := prwe.handleHistogramMetric(tsMap, resource, metric); err != nil {
 						dropped++
 						errs = append(errs, consumererror.Permanent(err))
 					}
-				case *otlp.Metric_DoubleSummary:
+				case pdata.MetricDataTypeSummary:
 					if err := prwe.handleSummaryMetric(tsMap,
resource, metric); err != nil { dropped++ errs = append(errs, consumererror.Permanent(err)) @@ -193,36 +186,42 @@ func validateAndSanitizeExternalLabels(externalLabels map[string]string) (map[st // handleScalarMetric processes data points in a single OTLP scalar metric by adding the each point as a Sample into // its corresponding TimeSeries in tsMap. // tsMap and metric cannot be nil, and metric must have a non-nil descriptor -func (prwe *PrwExporter) handleScalarMetric(tsMap map[string]*prompb.TimeSeries, resource resourcev1.Resource, metric *otlp.Metric) error { - switch metric.Data.(type) { +func (prwe *PrwExporter) handleScalarMetric(tsMap map[string]*prompb.TimeSeries, resource pdata.Resource, metric pdata.Metric) error { + switch metric.DataType() { // int points - case *otlp.Metric_DoubleGauge: - if metric.GetDoubleGauge().GetDataPoints() == nil { - return fmt.Errorf("nil data point. %s is dropped", metric.GetName()) + case pdata.MetricDataTypeDoubleGauge: + dataPoints := metric.DoubleGauge().DataPoints() + if dataPoints.Len() == 0 { + return fmt.Errorf("empty data points. %s is dropped", metric.Name()) } - for _, pt := range metric.GetDoubleGauge().GetDataPoints() { - addSingleDoubleDataPoint(pt, resource, metric, prwe.namespace, tsMap, prwe.externalLabels) + + for i := 0; i < dataPoints.Len(); i++ { + addSingleDoubleDataPoint(dataPoints.At(i), resource, metric, prwe.namespace, tsMap, prwe.externalLabels) } - case *otlp.Metric_IntGauge: - if metric.GetIntGauge().GetDataPoints() == nil { - return fmt.Errorf("nil data point. %s is dropped", metric.GetName()) + case pdata.MetricDataTypeIntGauge: + dataPoints := metric.IntGauge().DataPoints() + if dataPoints.Len() == 0 { + return fmt.Errorf("empty data points. %s is dropped", metric.Name()) } - for _, pt := range metric.GetIntGauge().GetDataPoints() { - addSingleIntDataPoint(pt, resource, metric, prwe.namespace, tsMap, prwe.externalLabels) + for i := 0; i < dataPoints.Len(); i++ { + addSingleIntDataPoint(dataPoints.At(i), resource, metric, prwe.namespace, tsMap, prwe.externalLabels) } - case *otlp.Metric_DoubleSum: - if metric.GetDoubleSum().GetDataPoints() == nil { - return fmt.Errorf("nil data point. %s is dropped", metric.GetName()) + case pdata.MetricDataTypeDoubleSum: + dataPoints := metric.DoubleSum().DataPoints() + if dataPoints.Len() == 0 { + return fmt.Errorf("empty data points. %s is dropped", metric.Name()) } - for _, pt := range metric.GetDoubleSum().GetDataPoints() { - addSingleDoubleDataPoint(pt, resource, metric, prwe.namespace, tsMap, prwe.externalLabels) + for i := 0; i < dataPoints.Len(); i++ { + addSingleDoubleDataPoint(dataPoints.At(i), resource, metric, prwe.namespace, tsMap, prwe.externalLabels) + } - case *otlp.Metric_IntSum: - if metric.GetIntSum().GetDataPoints() == nil { - return fmt.Errorf("nil data point. %s is dropped", metric.GetName()) + case pdata.MetricDataTypeIntSum: + dataPoints := metric.IntSum().DataPoints() + if dataPoints.Len() == 0 { + return fmt.Errorf("empty data points. 
%s is dropped", metric.Name()) } - for _, pt := range metric.GetIntSum().GetDataPoints() { - addSingleIntDataPoint(pt, resource, metric, prwe.namespace, tsMap, prwe.externalLabels) + for i := 0; i < dataPoints.Len(); i++ { + addSingleIntDataPoint(dataPoints.At(i), resource, metric, prwe.namespace, tsMap, prwe.externalLabels) } } return nil @@ -231,21 +230,23 @@ func (prwe *PrwExporter) handleScalarMetric(tsMap map[string]*prompb.TimeSeries, // handleHistogramMetric processes data points in a single OTLP histogram metric by mapping the sum, count and each // bucket of every data point as a Sample, and adding each Sample to its corresponding TimeSeries. // tsMap and metric cannot be nil. -func (prwe *PrwExporter) handleHistogramMetric(tsMap map[string]*prompb.TimeSeries, resource resourcev1.Resource, metric *otlp.Metric) error { - switch metric.Data.(type) { - case *otlp.Metric_IntHistogram: - if metric.GetIntHistogram().GetDataPoints() == nil { - return fmt.Errorf("nil data point. %s is dropped", metric.GetName()) +func (prwe *PrwExporter) handleHistogramMetric(tsMap map[string]*prompb.TimeSeries, resource pdata.Resource, metric pdata.Metric) error { + switch metric.DataType() { + case pdata.MetricDataTypeIntHistogram: + dataPoints := metric.IntHistogram().DataPoints() + if dataPoints.Len() == 0 { + return fmt.Errorf("empty data points. %s is dropped", metric.Name()) } - for _, pt := range metric.GetIntHistogram().GetDataPoints() { - addSingleIntHistogramDataPoint(pt, resource, metric, prwe.namespace, tsMap, prwe.externalLabels) + for i := 0; i < dataPoints.Len(); i++ { + addSingleIntHistogramDataPoint(dataPoints.At(i), resource, metric, prwe.namespace, tsMap, prwe.externalLabels) } - case *otlp.Metric_DoubleHistogram: - if metric.GetDoubleHistogram().GetDataPoints() == nil { - return fmt.Errorf("nil data point. %s is dropped", metric.GetName()) + case pdata.MetricDataTypeHistogram: + dataPoints := metric.Histogram().DataPoints() + if dataPoints.Len() == 0 { + return fmt.Errorf("empty data points. %s is dropped", metric.Name()) } - for _, pt := range metric.GetDoubleHistogram().GetDataPoints() { - addSingleDoubleHistogramDataPoint(pt, resource, metric, prwe.namespace, tsMap, prwe.externalLabels) + for i := 0; i < dataPoints.Len(); i++ { + addSingleDoubleHistogramDataPoint(dataPoints.At(i), resource, metric, prwe.namespace, tsMap, prwe.externalLabels) } } return nil @@ -254,12 +255,13 @@ func (prwe *PrwExporter) handleHistogramMetric(tsMap map[string]*prompb.TimeSeri // handleSummaryMetric processes data points in a single OTLP summary metric by mapping the sum, count and each // quantile of every data point as a Sample, and adding each Sample to its corresponding TimeSeries. // tsMap and metric cannot be nil. -func (prwe *PrwExporter) handleSummaryMetric(tsMap map[string]*prompb.TimeSeries, resource resourcev1.Resource, metric *otlp.Metric) error { - if metric.GetDoubleSummary().GetDataPoints() == nil { - return fmt.Errorf("nil data point. %s is dropped", metric.GetName()) +func (prwe *PrwExporter) handleSummaryMetric(tsMap map[string]*prompb.TimeSeries, resource pdata.Resource, metric pdata.Metric) error { + dataPoints := metric.Summary().DataPoints() + if dataPoints.Len() == 0 { + return fmt.Errorf("empty data points. 
%s is dropped", metric.Name()) } - for _, pt := range metric.GetDoubleSummary().GetDataPoints() { - addSingleDoubleSummaryDataPoint(pt, resource, metric, prwe.namespace, tsMap, prwe.externalLabels) + for i := 0; i < dataPoints.Len(); i++ { + addSingleDoubleSummaryDataPoint(dataPoints.At(i), resource, metric, prwe.namespace, tsMap, prwe.externalLabels) } return nil } diff --git a/exporter/prometheusremotewriteexporter/exporter_test.go b/exporter/prometheusremotewriteexporter/exporter_test.go index 29461869de6..831e8edfd3d 100644 --- a/exporter/prometheusremotewriteexporter/exporter_test.go +++ b/exporter/prometheusremotewriteexporter/exporter_test.go @@ -34,9 +34,6 @@ import ( "go.opentelemetry.io/collector/config/confighttp" "go.opentelemetry.io/collector/consumer/pdata" "go.opentelemetry.io/collector/exporter/exporterhelper" - "go.opentelemetry.io/collector/internal" - otlpcollectormetrics "go.opentelemetry.io/collector/internal/data/protogen/collector/metrics/v1" - otlp "go.opentelemetry.io/collector/internal/data/protogen/metrics/v1" "go.opentelemetry.io/collector/internal/testdata" ) @@ -281,238 +278,37 @@ func Test_PushMetrics(t *testing.T) { // success cases intSumBatch := testdata.GenerateMetricsManyMetricsSameResource(10) - doubleSumMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - validMetrics1[validDoubleSum], - validMetrics2[validDoubleSum], - }, - }, - }, - }, - }, - } - doubleSumBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(doubleSumMetric)) - - intGaugeMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - validMetrics1[validIntGauge], - validMetrics2[validIntGauge], - }, - }, - }, - }, - }, - } - intGaugeBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(intGaugeMetric)) - - doubleGaugeMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - validMetrics1[validDoubleGauge], - validMetrics2[validDoubleGauge], - }, - }, - }, - }, - }, - } - doubleGaugeBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(doubleGaugeMetric)) - - intHistogramMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - validMetrics1[validIntHistogram], - validMetrics2[validIntHistogram], - }, - }, - }, - }, - }, - } - intHistogramBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(intHistogramMetric)) - - doubleHistogramMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - validMetrics1[validDoubleHistogram], - validMetrics2[validDoubleHistogram], - }, - }, - }, - }, - }, - } - doubleHistogramBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(doubleHistogramMetric)) - - doubleSummaryMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: 
[]*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - validMetrics1[validDoubleSummary], - validMetrics2[validDoubleSummary], - }, - }, - }, - }, - }, - } - doubleSummaryBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(doubleSummaryMetric)) + doubleSumBatch := getMetricsFromMetricList(validMetrics1[validDoubleSum], validMetrics2[validDoubleSum]) + + intGaugeBatch := getMetricsFromMetricList(validMetrics1[validIntGauge], validMetrics2[validIntGauge]) + + doubleGaugeBatch := getMetricsFromMetricList(validMetrics1[validDoubleGauge], validMetrics2[validDoubleGauge]) + + intHistogramBatch := getMetricsFromMetricList(validMetrics1[validIntHistogram], validMetrics2[validIntHistogram]) + + histogramBatch := getMetricsFromMetricList(validMetrics1[validHistogram], validMetrics2[validHistogram]) + + summaryBatch := getMetricsFromMetricList(validMetrics1[validSummary], validMetrics2[validSummary]) // len(BucketCount) > len(ExplicitBounds) - unmatchedBoundBucketIntHistMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - validMetrics2[unmatchedBoundBucketIntHist], - }, - }, - }, - }, - }, - } - unmatchedBoundBucketIntHistBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(unmatchedBoundBucketIntHistMetric)) - - unmatchedBoundBucketDoubleHistMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - validMetrics2[unmatchedBoundBucketDoubleHist], - }, - }, - }, - }, - }, - } - unmatchedBoundBucketDoubleHistBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(unmatchedBoundBucketDoubleHistMetric)) + unmatchedBoundBucketIntHistBatch := getMetricsFromMetricList(validMetrics2[unmatchedBoundBucketIntHist]) + + unmatchedBoundBucketHistBatch := getMetricsFromMetricList(validMetrics2[unmatchedBoundBucketHist]) // fail cases - nilDataPointIntGaugeMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - errorMetrics[nilDataPointIntGauge], - }, - }, - }, - }, - }, - } - nilDataPointIntGaugeBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(nilDataPointIntGaugeMetric)) - - nilDataPointDoubleGaugeMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - errorMetrics[nilDataPointDoubleGauge], - }, - }, - }, - }, - }, - } - nilDataPointDoubleGaugeBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(nilDataPointDoubleGaugeMetric)) - - nilDataPointIntSumMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - errorMetrics[nilDataPointIntSum], - }, - }, - }, - }, - }, - } - nilDataPointIntSumBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(nilDataPointIntSumMetric)) - - nilDataPointDoubleSumMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: 
[]*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - errorMetrics[nilDataPointDoubleSum], - }, - }, - }, - }, - }, - } - nilDataPointDoubleSumBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(nilDataPointDoubleSumMetric)) - - nilDataPointIntHistogramMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - errorMetrics[nilDataPointIntHistogram], - }, - }, - }, - }, - }, - } - nilDataPointIntHistogramBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(nilDataPointIntHistogramMetric)) - - nilDataPointDoubleHistogramMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - errorMetrics[nilDataPointDoubleHistogram], - }, - }, - }, - }, - }, - } - nilDataPointDoubleHistogramBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(nilDataPointDoubleHistogramMetric)) - - nilDataPointDoubleSummaryMetric := &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: []*otlp.ResourceMetrics{ - { - InstrumentationLibraryMetrics: []*otlp.InstrumentationLibraryMetrics{ - { - Metrics: []*otlp.Metric{ - errorMetrics[nilDataPointDoubleSummary], - }, - }, - }, - }, - }, - } - nilDataPointDoubleSummaryBatch := pdata.MetricsFromInternalRep(internal.MetricsFromOtlp(nilDataPointDoubleSummaryMetric)) + emptyIntGaugeBatch := getMetricsFromMetricList(invalidMetrics[emptyIntGauge]) + + emptyDoubleGaugeBatch := getMetricsFromMetricList(invalidMetrics[emptyDoubleGauge]) + + emptyCumulativeIntSumBatch := getMetricsFromMetricList(invalidMetrics[emptyCumulativeIntSum]) + + emptyCumulativeDoubleSumBatch := getMetricsFromMetricList(invalidMetrics[emptyCumulativeDoubleSum]) + + emptyCumulativeIntHistogramBatch := getMetricsFromMetricList(invalidMetrics[emptyCumulativeIntHistogram]) + + emptyCumulativeHistogramBatch := getMetricsFromMetricList(invalidMetrics[emptyCumulativeHistogram]) + + emptyCumulativeSummaryBatch := getMetricsFromMetricList(invalidMetrics[emptySummary]) checkFunc := func(t *testing.T, r *http.Request, expected int) { body, err := ioutil.ReadAll(r.Body) @@ -590,16 +386,16 @@ func Test_PushMetrics(t *testing.T) { false, }, { - "doubleHistogram_case", - &doubleHistogramBatch, + "histogram_case", + &histogramBatch, checkFunc, 12, http.StatusAccepted, false, }, { - "doubleSummary_case", - &doubleSummaryBatch, + "summary_case", + &summaryBatch, checkFunc, 10, http.StatusAccepted, @@ -614,8 +410,8 @@ func Test_PushMetrics(t *testing.T) { false, }, { - "unmatchedBoundBucketDoubleHist_case", - &unmatchedBoundBucketDoubleHistBatch, + "unmatchedBoundBucketHist_case", + &unmatchedBoundBucketHistBatch, checkFunc, 5, http.StatusAccepted, @@ -623,63 +419,63 @@ func Test_PushMetrics(t *testing.T) { }, { "5xx_case", - &unmatchedBoundBucketDoubleHistBatch, + &unmatchedBoundBucketHistBatch, checkFunc, 5, http.StatusServiceUnavailable, true, }, { - "nilDataPointDoubleGauge_case", - &nilDataPointDoubleGaugeBatch, + "emptyDoubleGauge_case", + &emptyDoubleGaugeBatch, checkFunc, 0, http.StatusAccepted, true, }, { - "nilDataPointIntGauge_case", - &nilDataPointIntGaugeBatch, + "emptyIntGauge_case", + &emptyIntGaugeBatch, checkFunc, 0, http.StatusAccepted, true, }, { - "nilDataPointDoubleSum_case", - &nilDataPointDoubleSumBatch, + "emptyCumulativeDoubleSum_case", + 
&emptyCumulativeDoubleSumBatch, checkFunc, 0, http.StatusAccepted, true, }, { - "nilDataPointIntSum_case", - &nilDataPointIntSumBatch, + "emptyCumulativeIntSum_case", + &emptyCumulativeIntSumBatch, checkFunc, 0, http.StatusAccepted, true, }, { - "nilDataPointDoubleHistogram_case", - &nilDataPointDoubleHistogramBatch, + "emptyCumulativeHistogram_case", + &emptyCumulativeHistogramBatch, checkFunc, 0, http.StatusAccepted, true, }, { - "nilDataPointIntHistogram_case", - &nilDataPointIntHistogramBatch, + "emptyCumulativeIntHistogram_case", + &emptyCumulativeIntHistogramBatch, checkFunc, 0, http.StatusAccepted, true, }, { - "nilDataPointDoubleSummary_case", - &nilDataPointDoubleSummaryBatch, + "emptyCumulativeSummary_case", + &emptyCumulativeSummaryBatch, checkFunc, 0, http.StatusAccepted, diff --git a/exporter/prometheusremotewriteexporter/helper.go b/exporter/prometheusremotewriteexporter/helper.go index 2a5df623a11..dd615549782 100644 --- a/exporter/prometheusremotewriteexporter/helper.go +++ b/exporter/prometheusremotewriteexporter/helper.go @@ -27,9 +27,6 @@ import ( "github.com/prometheus/prometheus/prompb" "go.opentelemetry.io/collector/consumer/pdata" - common "go.opentelemetry.io/collector/internal/data/protogen/common/v1" - otlp "go.opentelemetry.io/collector/internal/data/protogen/metrics/v1" - resourcev1 "go.opentelemetry.io/collector/internal/data/protogen/resource/v1" ) const ( @@ -54,29 +51,22 @@ func (a ByLabelName) Swap(i, j int) { a[i], a[j] = a[j], a[i] } // validateMetrics returns a bool representing whether the metric has a valid type and temporality combination and a // matching metric type and field -func validateMetrics(metric *otlp.Metric) bool { - if metric == nil || metric.Data == nil { - return false - } - switch metric.Data.(type) { - case *otlp.Metric_DoubleGauge: - return metric.GetDoubleGauge() != nil - case *otlp.Metric_IntGauge: - return metric.GetIntGauge() != nil - case *otlp.Metric_DoubleSum: - return metric.GetDoubleSum() != nil && metric.GetDoubleSum().GetAggregationTemporality() == - otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE - case *otlp.Metric_IntSum: - return metric.GetIntSum() != nil && metric.GetIntSum().GetAggregationTemporality() == - otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE - case *otlp.Metric_DoubleHistogram: - return metric.GetDoubleHistogram() != nil && metric.GetDoubleHistogram().GetAggregationTemporality() == - otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE - case *otlp.Metric_IntHistogram: - return metric.GetIntHistogram() != nil && metric.GetIntHistogram().GetAggregationTemporality() == - otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE - case *otlp.Metric_DoubleSummary: - return metric.GetDoubleSummary() != nil +func validateMetrics(metric pdata.Metric) bool { + switch metric.DataType() { + case pdata.MetricDataTypeDoubleGauge: + return metric.DoubleGauge().DataPoints().Len() != 0 + case pdata.MetricDataTypeIntGauge: + return metric.IntGauge().DataPoints().Len() != 0 + case pdata.MetricDataTypeDoubleSum: + return metric.DoubleSum().DataPoints().Len() != 0 && metric.DoubleSum().AggregationTemporality() == pdata.AggregationTemporalityCumulative + case pdata.MetricDataTypeIntSum: + return metric.IntSum().DataPoints().Len() != 0 && metric.IntSum().AggregationTemporality() == pdata.AggregationTemporalityCumulative + case pdata.MetricDataTypeHistogram: + return metric.Histogram().DataPoints().Len() != 0 && metric.Histogram().AggregationTemporality() == 
pdata.AggregationTemporalityCumulative + case pdata.MetricDataTypeIntHistogram: + return metric.IntHistogram().DataPoints().Len() != 0 && metric.IntHistogram().AggregationTemporality() == pdata.AggregationTemporalityCumulative + case pdata.MetricDataTypeSummary: + return metric.Summary().DataPoints().Len() != 0 } return false } @@ -84,7 +74,7 @@ func validateMetrics(metric *otlp.Metric) bool { // addSample finds a TimeSeries in tsMap that corresponds to the label set labels, and add sample to the TimeSeries; it // creates a new TimeSeries in the map if not found. tsMap is unmodified if either of its parameters is nil. func addSample(tsMap map[string]*prompb.TimeSeries, sample *prompb.Sample, labels []prompb.Label, - metric *otlp.Metric) { + metric pdata.Metric) { if sample == nil || labels == nil || tsMap == nil { return @@ -108,9 +98,9 @@ func addSample(tsMap map[string]*prompb.TimeSeries, sample *prompb.Sample, label // TYPE-label1-value1- ... -labelN-valueN // the label slice should not contain duplicate label names; this method sorts the slice by label name before creating // the signature. -func timeSeriesSignature(metric *otlp.Metric, labels *[]prompb.Label) string { +func timeSeriesSignature(metric pdata.Metric, labels *[]prompb.Label) string { b := strings.Builder{} - b.WriteString(getTypeString(metric)) + b.WriteString(metric.DataType().String()) sort.Sort(ByLabelName(*labels)) @@ -127,7 +117,7 @@ func timeSeriesSignature(metric *otlp.Metric, labels *[]prompb.Label) string { // createLabelSet creates a slice of Cortex Label with OTLP labels and paris of string values. // Unpaired string value is ignored. String pairs overwrites OTLP labels if collision happens, and the overwrite is // logged. Resultant label names are sanitized. -func createLabelSet(resource resourcev1.Resource, labels []common.StringKeyValue, externalLabels map[string]string, extras ...string) []prompb.Label { +func createLabelSet(resource pdata.Resource, labels pdata.StringMap, externalLabels map[string]string, extras ...string) []prompb.Label { // map ensures no duplicate label name l := map[string]prompb.Label{} @@ -139,21 +129,25 @@ func createLabelSet(resource resourcev1.Resource, labels []common.StringKeyValue } } - for _, attr := range resource.Attributes { - if isUsefulResourceAttribute(attr) { - l[attr.Key] = prompb.Label{ - Name: sanitize(attr.Key), - Value: attr.Value.GetStringValue(), // TODO(jbd): Decide what to do with non-string attributes. + resource.Attributes().Range(func(key string, value pdata.AttributeValue) bool { + if isUsefulResourceAttribute(key) { + l[key] = prompb.Label{ + Name: sanitize(key), + Value: value.StringVal(), // TODO(jbd): Decide what to do with non-string attributes. } } - } - for _, lb := range labels { - l[lb.Key] = prompb.Label{ - Name: sanitize(lb.Key), - Value: lb.Value, + return true + }) + + labels.Range(func(key string, value string) bool { + l[key] = prompb.Label{ + Name: sanitize(key), + Value: value, } - } + + return true + }) for i := 0; i < len(extras); i += 2 { if i+1 >= len(extras) { @@ -182,12 +176,12 @@ func createLabelSet(resource resourcev1.Resource, labels []common.StringKeyValue return s } -func isUsefulResourceAttribute(attr common.KeyValue) bool { +func isUsefulResourceAttribute(key string) bool { // TODO(jbd): Allow users to configure what other resource // attributes to be included. // Decide what to do with non-string attributes. // We should always output "job" and "instance". 
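	// For example, a resource carrying job="prometheus" and instance="127.0.0.1:8080"
	// keeps exactly those two attributes as the Prometheus job and instance labels
	// (see the labels_with_resource test case in helper_test.go below).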
- switch attr.Key { + switch key { case model.InstanceLabel: return true case model.JobLabel: @@ -198,15 +192,9 @@ func isUsefulResourceAttribute(attr common.KeyValue) bool { // getPromMetricName creates a Prometheus metric name by attaching namespace prefix, and _total suffix for Monotonic // metrics. -func getPromMetricName(metric *otlp.Metric, ns string) string { - if metric == nil { - return "" - } - +func getPromMetricName(metric pdata.Metric, ns string) string { // if the metric is counter, _total suffix should be applied - _, isCounter1 := metric.Data.(*otlp.Metric_DoubleSum) - _, isCounter2 := metric.Data.(*otlp.Metric_IntSum) - isCounter := isCounter1 || isCounter2 + isCounter := metric.DataType() == pdata.MetricDataTypeDoubleSum || metric.DataType() == pdata.MetricDataTypeIntSum b := strings.Builder{} @@ -215,7 +203,7 @@ func getPromMetricName(metric *otlp.Metric, ns string) string { if b.Len() > 0 { b.WriteString(delimiter) } - name := metric.GetName() + name := metric.Name() b.WriteString(name) // Including units makes two metrics with the same name and label set belong to two different TimeSeries if the @@ -267,8 +255,8 @@ func batchTimeSeries(tsMap map[string]*prompb.TimeSeries, maxBatchByteSize int) } // convertTimeStamp converts OTLP timestamp in ns to timestamp in ms -func convertTimeStamp(timestamp uint64) int64 { - return int64(timestamp / uint64(int64(time.Millisecond)/int64(time.Nanosecond))) +func convertTimeStamp(timestamp pdata.Timestamp) int64 { + return timestamp.AsTime().UnixNano() / (int64(time.Millisecond) / int64(time.Nanosecond)) } // copied from prometheus-go-metric-exporter @@ -301,202 +289,170 @@ func sanitizeRune(r rune) rune { return '_' } -func getTypeString(metric *otlp.Metric) string { - switch metric.Data.(type) { - case *otlp.Metric_DoubleGauge: - return strconv.Itoa(int(pdata.MetricDataTypeDoubleGauge)) - case *otlp.Metric_IntGauge: - return strconv.Itoa(int(pdata.MetricDataTypeIntGauge)) - case *otlp.Metric_DoubleSum: - return strconv.Itoa(int(pdata.MetricDataTypeDoubleSum)) - case *otlp.Metric_IntSum: - return strconv.Itoa(int(pdata.MetricDataTypeIntSum)) - case *otlp.Metric_DoubleHistogram: - return strconv.Itoa(int(pdata.MetricDataTypeHistogram)) - case *otlp.Metric_IntHistogram: - return strconv.Itoa(int(pdata.MetricDataTypeIntHistogram)) - } - return "" -} - // addSingleDoubleDataPoint converts the metric value stored in pt to a Prometheus sample, and add the sample // to its corresponding time series in tsMap -func addSingleDoubleDataPoint(pt *otlp.DoubleDataPoint, resource resourcev1.Resource, metric *otlp.Metric, namespace string, +func addSingleDoubleDataPoint(pt pdata.DoubleDataPoint, resource pdata.Resource, metric pdata.Metric, namespace string, tsMap map[string]*prompb.TimeSeries, externalLabels map[string]string) { - if pt == nil { - return - } // create parameters for addSample name := getPromMetricName(metric, namespace) - labels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, name) + labels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, name) sample := &prompb.Sample{ - Value: pt.Value, + Value: pt.Value(), // convert ns to ms - Timestamp: convertTimeStamp(pt.TimeUnixNano), + Timestamp: convertTimeStamp(pt.Timestamp()), } addSample(tsMap, sample, labels, metric) } // addSingleIntDataPoint converts the metric value stored in pt to a Prometheus sample, and add the sample // to its corresponding time series in tsMap -func addSingleIntDataPoint(pt *otlp.IntDataPoint, resource 
resourcev1.Resource, metric *otlp.Metric, namespace string, +func addSingleIntDataPoint(pt pdata.IntDataPoint, resource pdata.Resource, metric pdata.Metric, namespace string, tsMap map[string]*prompb.TimeSeries, externalLabels map[string]string) { - if pt == nil { - return - } // create parameters for addSample name := getPromMetricName(metric, namespace) - labels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, name) + labels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, name) sample := &prompb.Sample{ - Value: float64(pt.Value), + Value: float64(pt.Value()), // convert ns to ms - Timestamp: convertTimeStamp(pt.TimeUnixNano), + Timestamp: convertTimeStamp(pt.Timestamp()), } addSample(tsMap, sample, labels, metric) } // addSingleIntHistogramDataPoint converts pt to 2 + min(len(ExplicitBounds), len(BucketCount)) + 1 samples. It // ignore extra buckets if len(ExplicitBounds) > len(BucketCounts) -func addSingleIntHistogramDataPoint(pt *otlp.IntHistogramDataPoint, resource resourcev1.Resource, metric *otlp.Metric, namespace string, +func addSingleIntHistogramDataPoint(pt pdata.IntHistogramDataPoint, resource pdata.Resource, metric pdata.Metric, namespace string, tsMap map[string]*prompb.TimeSeries, externalLabels map[string]string) { - if pt == nil { - return - } - time := convertTimeStamp(pt.TimeUnixNano) + time := convertTimeStamp(pt.Timestamp()) // sum, count, and buckets of the histogram should append suffix to baseName baseName := getPromMetricName(metric, namespace) // treat sum as a sample in an individual TimeSeries sum := &prompb.Sample{ - Value: float64(pt.GetSum()), + Value: float64(pt.Sum()), Timestamp: time, } - sumlabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+sumStr) + sumlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+sumStr) addSample(tsMap, sum, sumlabels, metric) // treat count as a sample in an individual TimeSeries count := &prompb.Sample{ - Value: float64(pt.GetCount()), + Value: float64(pt.Count()), Timestamp: time, } - countlabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+countStr) + countlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+countStr) addSample(tsMap, count, countlabels, metric) // cumulative count for conversion to cumulative histogram var cumulativeCount uint64 // process each bound, ignore extra bucket values - for index, bound := range pt.GetExplicitBounds() { - if index >= len(pt.GetBucketCounts()) { + for index, bound := range pt.ExplicitBounds() { + if index >= len(pt.BucketCounts()) { break } - cumulativeCount += pt.GetBucketCounts()[index] + cumulativeCount += pt.BucketCounts()[index] bucket := &prompb.Sample{ Value: float64(cumulativeCount), Timestamp: time, } boundStr := strconv.FormatFloat(bound, 'f', -1, 64) - labels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+bucketStr, leStr, boundStr) + labels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+bucketStr, leStr, boundStr) addSample(tsMap, bucket, labels, metric) } // add le=+Inf bucket - cumulativeCount += pt.GetBucketCounts()[len(pt.GetBucketCounts())-1] + cumulativeCount += pt.BucketCounts()[len(pt.BucketCounts())-1] infBucket := &prompb.Sample{ Value: float64(cumulativeCount), Timestamp: time, } - infLabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+bucketStr, leStr, pInfStr) + infLabels := 
createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+bucketStr, leStr, pInfStr) addSample(tsMap, infBucket, infLabels, metric) } // addSingleDoubleHistogramDataPoint converts pt to 2 + min(len(ExplicitBounds), len(BucketCount)) + 1 samples. It // ignore extra buckets if len(ExplicitBounds) > len(BucketCounts) -func addSingleDoubleHistogramDataPoint(pt *otlp.DoubleHistogramDataPoint, resource resourcev1.Resource, metric *otlp.Metric, namespace string, +func addSingleDoubleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Resource, metric pdata.Metric, namespace string, tsMap map[string]*prompb.TimeSeries, externalLabels map[string]string) { - if pt == nil { - return - } - time := convertTimeStamp(pt.TimeUnixNano) + time := convertTimeStamp(pt.Timestamp()) // sum, count, and buckets of the histogram should append suffix to baseName baseName := getPromMetricName(metric, namespace) // treat sum as a sample in an individual TimeSeries sum := &prompb.Sample{ - Value: pt.GetSum(), + Value: pt.Sum(), Timestamp: time, } - sumlabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+sumStr) + sumlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+sumStr) addSample(tsMap, sum, sumlabels, metric) // treat count as a sample in an individual TimeSeries count := &prompb.Sample{ - Value: float64(pt.GetCount()), + Value: float64(pt.Count()), Timestamp: time, } - countlabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+countStr) + countlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+countStr) addSample(tsMap, count, countlabels, metric) // cumulative count for conversion to cumulative histogram var cumulativeCount uint64 // process each bound, based on histograms proto definition, # of buckets = # of explicit bounds + 1 - for index, bound := range pt.GetExplicitBounds() { - if index >= len(pt.GetBucketCounts()) { + for index, bound := range pt.ExplicitBounds() { + if index >= len(pt.BucketCounts()) { break } - cumulativeCount += pt.GetBucketCounts()[index] + cumulativeCount += pt.BucketCounts()[index] bucket := &prompb.Sample{ Value: float64(cumulativeCount), Timestamp: time, } boundStr := strconv.FormatFloat(bound, 'f', -1, 64) - labels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+bucketStr, leStr, boundStr) + labels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+bucketStr, leStr, boundStr) addSample(tsMap, bucket, labels, metric) } // add le=+Inf bucket - cumulativeCount += pt.GetBucketCounts()[len(pt.GetBucketCounts())-1] + cumulativeCount += pt.BucketCounts()[len(pt.BucketCounts())-1] infBucket := &prompb.Sample{ Value: float64(cumulativeCount), Timestamp: time, } - infLabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+bucketStr, leStr, pInfStr) + infLabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+bucketStr, leStr, pInfStr) addSample(tsMap, infBucket, infLabels, metric) } // addSingleDoubleSummaryDataPoint converts pt to len(QuantileValues) + 2 samples. 
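 // The two extra samples are the sum and count of the summary data point, each
 // emitted as its own series alongside the per-quantile samples.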
-func addSingleDoubleSummaryDataPoint(pt *otlp.DoubleSummaryDataPoint, resource resourcev1.Resource, metric *otlp.Metric, namespace string, +func addSingleDoubleSummaryDataPoint(pt pdata.SummaryDataPoint, resource pdata.Resource, metric pdata.Metric, namespace string, tsMap map[string]*prompb.TimeSeries, externalLabels map[string]string) { - if pt == nil { - return - } - time := convertTimeStamp(pt.TimeUnixNano) + time := convertTimeStamp(pt.Timestamp()) // sum and count of the summary should append suffix to baseName baseName := getPromMetricName(metric, namespace) // treat sum as a sample in an individual TimeSeries sum := &prompb.Sample{ - Value: pt.GetSum(), + Value: pt.Sum(), Timestamp: time, } - sumlabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+sumStr) + sumlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+sumStr) addSample(tsMap, sum, sumlabels, metric) // treat count as a sample in an individual TimeSeries count := &prompb.Sample{ - Value: float64(pt.GetCount()), + Value: float64(pt.Count()), Timestamp: time, } - countlabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName+countStr) + countlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+countStr) addSample(tsMap, count, countlabels, metric) // process each percentile/quantile - for _, qt := range pt.GetQuantileValues() { + for i := 0; i < pt.QuantileValues().Len(); i++ { + qt := pt.QuantileValues().At(i) quantile := &prompb.Sample{ - Value: qt.Value, + Value: qt.Value(), Timestamp: time, } - percentileStr := strconv.FormatFloat(qt.GetQuantile(), 'f', -1, 64) - qtlabels := createLabelSet(resource, pt.GetLabels(), externalLabels, nameStr, baseName, quantileStr, percentileStr) + percentileStr := strconv.FormatFloat(qt.Quantile(), 'f', -1, 64) + qtlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName, quantileStr, percentileStr) addSample(tsMap, quantile, qtlabels, metric) } } diff --git a/exporter/prometheusremotewriteexporter/helper_test.go b/exporter/prometheusremotewriteexporter/helper_test.go index ffb365a47ea..095831d861b 100644 --- a/exporter/prometheusremotewriteexporter/helper_test.go +++ b/exporter/prometheusremotewriteexporter/helper_test.go @@ -15,16 +15,12 @@ package prometheusremotewriteexporter import ( - "strconv" "testing" "github.com/prometheus/prometheus/prompb" "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/consumer/pdata" - common "go.opentelemetry.io/collector/internal/data/protogen/common/v1" - otlp "go.opentelemetry.io/collector/internal/data/protogen/metrics/v1" - resourcev1 "go.opentelemetry.io/collector/internal/data/protogen/resource/v1" ) // Test_validateMetrics checks validateMetrics return true if a type and temporality combination is valid, false @@ -34,7 +30,7 @@ func Test_validateMetrics(t *testing.T) { // define a single test type combTest struct { name string - metric *otlp.Metric + metric pdata.Metric want bool } @@ -51,11 +47,8 @@ func Test_validateMetrics(t *testing.T) { }) } - // append nil case - tests = append(tests, combTest{"invalid_nil", nil, false}) - for k, invalidMetric := range invalidMetrics { - name := "valid_" + k + name := "invalid_" + k tests = append(tests, combTest{ name, @@ -79,7 +72,7 @@ func Test_validateMetrics(t *testing.T) { // case. 
func Test_addSample(t *testing.T) { type testCase struct { - metric *otlp.Metric + metric pdata.Metric sample prompb.Sample labels []prompb.Label } @@ -122,9 +115,9 @@ func Test_addSample(t *testing.T) { twoPointsDifferentTs, }, } - t.Run("nil_case", func(t *testing.T) { + t.Run("empty_case", func(t *testing.T) { tsMap := map[string]*prompb.TimeSeries{} - addSample(tsMap, nil, nil, nil) + addSample(tsMap, nil, nil, pdata.NewMetric()) assert.Exactly(t, tsMap, map[string]*prompb.TimeSeries{}) }) // run tests @@ -143,33 +136,33 @@ func Test_timeSeriesSignature(t *testing.T) { tests := []struct { name string lbs []prompb.Label - metric *otlp.Metric + metric pdata.Metric want string }{ { "int64_signature", promLbs1, validMetrics1[validIntGauge], - strconv.Itoa(int(pdata.MetricDataTypeIntGauge)) + lb1Sig, + validMetrics1[validIntGauge].DataType().String() + lb1Sig, }, { "histogram_signature", promLbs2, validMetrics1[validIntHistogram], - strconv.Itoa(int(pdata.MetricDataTypeIntHistogram)) + lb2Sig, + validMetrics1[validIntHistogram].DataType().String() + lb2Sig, }, { "unordered_signature", getPromLabels(label22, value22, label21, value21), validMetrics1[validIntHistogram], - strconv.Itoa(int(pdata.MetricDataTypeIntHistogram)) + lb2Sig, + validMetrics1[validIntHistogram].DataType().String() + lb2Sig, }, // descriptor type cannot be nil, as checked by validateMetrics { "nil_case", nil, validMetrics1[validIntHistogram], - strconv.Itoa(int(pdata.MetricDataTypeIntHistogram)), + validMetrics1[validIntHistogram].DataType().String(), }, } @@ -186,17 +179,15 @@ func Test_timeSeriesSignature(t *testing.T) { func Test_createLabelSet(t *testing.T) { tests := []struct { name string - resource resourcev1.Resource - orig []common.StringKeyValue + resource pdata.Resource + orig pdata.StringMap externalLabels map[string]string extras []string want []prompb.Label }{ { "labels_clean", - resourcev1.Resource{ - Attributes: []common.KeyValue{}, - }, + getResource(), lbs1, map[string]string{}, []string{label31, value31, label32, value32}, @@ -204,18 +195,7 @@ func Test_createLabelSet(t *testing.T) { }, { "labels_with_resource", - resourcev1.Resource{ - Attributes: []common.KeyValue{ - { - Key: "job", - Value: common.AnyValue{Value: &common.AnyValue_StringValue{StringValue: "prometheus"}}, - }, - { - Key: "instance", - Value: common.AnyValue{Value: &common.AnyValue_StringValue{StringValue: "127.0.0.1:8080"}}, - }, - }, - }, + getResource("job", "prometheus", "instance", "127.0.0.1:8080"), lbs1, map[string]string{}, []string{label31, value31, label32, value32}, @@ -223,9 +203,7 @@ func Test_createLabelSet(t *testing.T) { }, { "labels_duplicate_in_extras", - resourcev1.Resource{ - Attributes: []common.KeyValue{}, - }, + getResource(), lbs1, map[string]string{}, []string{label11, value31}, @@ -233,9 +211,7 @@ func Test_createLabelSet(t *testing.T) { }, { "labels_dirty", - resourcev1.Resource{ - Attributes: []common.KeyValue{}, - }, + getResource(), lbs1Dirty, map[string]string{}, []string{label31 + dirty1, value31, label32, value32}, @@ -243,19 +219,15 @@ func Test_createLabelSet(t *testing.T) { }, { "no_original_case", - resourcev1.Resource{ - Attributes: []common.KeyValue{}, - }, - nil, + getResource(), + pdata.NewStringMap(), nil, []string{label31, value31, label32, value32}, getPromLabels(label31, value31, label32, value32), }, { "empty_extra_case", - resourcev1.Resource{ - Attributes: []common.KeyValue{}, - }, + getResource(), lbs1, map[string]string{}, []string{"", ""}, @@ -263,9 +235,7 @@ func Test_createLabelSet(t 
*testing.T) { }, { "single_left_over_case", - resourcev1.Resource{ - Attributes: []common.KeyValue{}, - }, + getResource(), lbs1, map[string]string{}, []string{label31, value31, label32}, @@ -273,9 +243,7 @@ func Test_createLabelSet(t *testing.T) { }, { "valid_external_labels", - resourcev1.Resource{ - Attributes: []common.KeyValue{}, - }, + getResource(), lbs1, exlbs1, []string{label31, value31, label32, value32}, @@ -283,9 +251,7 @@ func Test_createLabelSet(t *testing.T) { }, { "overwritten_external_labels", - resourcev1.Resource{ - Attributes: []common.KeyValue{}, - }, + getResource(), lbs1, exlbs2, []string{label31, value31, label32, value32}, @@ -306,15 +272,15 @@ func Test_createLabelSet(t *testing.T) { func Test_getPromMetricName(t *testing.T) { tests := []struct { name string - metric *otlp.Metric + metric pdata.Metric ns string want string }{ { - "nil_case", - nil, + "empty_case", + invalidMetrics[empty], ns1, - "", + "test_ns_", }, { "normal_case", diff --git a/exporter/prometheusremotewriteexporter/testutil_test.go b/exporter/prometheusremotewriteexporter/testutil_test.go index 193b9529009..754eaed4cfa 100644 --- a/exporter/prometheusremotewriteexporter/testutil_test.go +++ b/exporter/prometheusremotewriteexporter/testutil_test.go @@ -20,8 +20,7 @@ import ( "github.com/prometheus/prometheus/prompb" - commonpb "go.opentelemetry.io/collector/internal/data/protogen/common/v1" - otlp "go.opentelemetry.io/collector/internal/data/protogen/metrics/v1" + "go.opentelemetry.io/collector/consumer/pdata" ) var ( @@ -70,14 +69,14 @@ var ( ns1 = "test_ns" twoPointsSameTs = map[string]*prompb.TimeSeries{ - "2" + "-" + label11 + "-" + value11 + "-" + label12 + "-" + value12: getTimeSeries(getPromLabels(label11, value11, label12, value12), + "DoubleGauge" + "-" + label11 + "-" + value11 + "-" + label12 + "-" + value12: getTimeSeries(getPromLabels(label11, value11, label12, value12), getSample(float64(intVal1), msTime1), getSample(float64(intVal2), msTime2)), } twoPointsDifferentTs = map[string]*prompb.TimeSeries{ - "1" + "-" + label11 + "-" + value11 + "-" + label12 + "-" + value12: getTimeSeries(getPromLabels(label11, value11, label12, value12), + "IntGauge" + "-" + label11 + "-" + value11 + "-" + label12 + "-" + value12: getTimeSeries(getPromLabels(label11, value11, label12, value12), getSample(float64(intVal1), msTime1)), - "1" + "-" + label21 + "-" + value21 + "-" + label22 + "-" + value22: getTimeSeries(getPromLabels(label21, value21, label22, value22), + "IntGauge" + "-" + label21 + "-" + value21 + "-" + label22 + "-" + value22: getTimeSeries(getPromLabels(label21, value21, label22, value22), getSample(float64(intVal1), msTime2)), } bounds = []float64{0.1, 0.5, 0.99} @@ -87,456 +86,86 @@ var ( quantileValues = []float64{7, 8, 9} quantiles = getQuantiles(quantileBounds, quantileValues) - validIntGauge = "valid_IntGauge" - validDoubleGauge = "valid_DoubleGauge" - validIntSum = "valid_IntSum" - validDoubleSum = "valid_DoubleSum" - validIntHistogram = "valid_IntHistogram" - validDoubleHistogram = "valid_DoubleHistogram" - validDoubleSummary = "valid_DoubleSummary" - suffixedCounter = "valid_IntSum_total" + validIntGauge = "valid_IntGauge" + validDoubleGauge = "valid_DoubleGauge" + validIntSum = "valid_IntSum" + validDoubleSum = "valid_DoubleSum" + validIntHistogram = "valid_IntHistogram" + validHistogram = "valid_Histogram" + validSummary = "valid_Summary" + suffixedCounter = "valid_IntSum_total" validIntGaugeDirty = "*valid_IntGauge$" - unmatchedBoundBucketIntHist = 
"unmatchedBoundBucketIntHist" - unmatchedBoundBucketDoubleHist = "unmatchedBoundBucketDoubleHist" + unmatchedBoundBucketIntHist = "unmatchedBoundBucketIntHist" + unmatchedBoundBucketHist = "unmatchedBoundBucketHist" // valid metrics as input should not return error - validMetrics1 = map[string]*otlp.Metric{ - validIntGauge: { - Name: validIntGauge, - Data: &otlp.Metric_IntGauge{ - IntGauge: &otlp.IntGauge{ - DataPoints: []*otlp.IntDataPoint{ - getIntDataPoint(lbs1, intVal1, time1), - nil, - }, - }, - }, - }, - validDoubleGauge: { - Name: validDoubleGauge, - Data: &otlp.Metric_DoubleGauge{ - DoubleGauge: &otlp.DoubleGauge{ - DataPoints: []*otlp.DoubleDataPoint{ - getDoubleDataPoint(lbs1, floatVal1, time1), - nil, - }, - }, - }, - }, - validIntSum: { - Name: validIntSum, - Data: &otlp.Metric_IntSum{ - IntSum: &otlp.IntSum{ - DataPoints: []*otlp.IntDataPoint{ - getIntDataPoint(lbs1, intVal1, time1), - nil, - }, - AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE, - }, - }, - }, - suffixedCounter: { - Name: suffixedCounter, - Data: &otlp.Metric_IntSum{ - IntSum: &otlp.IntSum{ - DataPoints: []*otlp.IntDataPoint{ - getIntDataPoint(lbs1, intVal1, time1), - nil, - }, - AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE, - }, - }, - }, - validDoubleSum: { - Name: validDoubleSum, - Data: &otlp.Metric_DoubleSum{ - DoubleSum: &otlp.DoubleSum{ - DataPoints: []*otlp.DoubleDataPoint{ - getDoubleDataPoint(lbs1, floatVal1, time1), - nil, - }, - AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE, - }, - }, - }, - validIntHistogram: { - Name: validIntHistogram, - Data: &otlp.Metric_IntHistogram{ - IntHistogram: &otlp.IntHistogram{ - DataPoints: []*otlp.IntHistogramDataPoint{ - getIntHistogramDataPoint(lbs1, time1, floatVal1, uint64(intVal1), bounds, buckets), - nil, - }, - AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE, - }, - }, - }, - validDoubleHistogram: { - Name: validDoubleHistogram, - Data: &otlp.Metric_DoubleHistogram{ - DoubleHistogram: &otlp.DoubleHistogram{ - DataPoints: []*otlp.DoubleHistogramDataPoint{ - getDoubleHistogramDataPoint(lbs1, time1, floatVal1, uint64(intVal1), bounds, buckets), - nil, - }, - AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE, - }, - }, - }, - validDoubleSummary: { - Name: validDoubleSummary, - Data: &otlp.Metric_DoubleSummary{ - DoubleSummary: &otlp.DoubleSummary{ - DataPoints: []*otlp.DoubleSummaryDataPoint{ - getDoubleSummaryDataPoint(lbs1, time1, floatVal1, uint64(intVal1), quantiles), - nil, - }, - }, - }, - }, + validMetrics1 = map[string]pdata.Metric{ + validIntGauge: getIntGaugeMetric(validIntGauge, lbs1, intVal1, time1), + validDoubleGauge: getDoubleGaugeMetric(validDoubleGauge, lbs1, floatVal1, time1), + validIntSum: getIntSumMetric(validIntSum, lbs1, intVal1, time1), + suffixedCounter: getIntSumMetric(suffixedCounter, lbs1, intVal1, time1), + validDoubleSum: getDoubleSumMetric(validDoubleSum, lbs1, floatVal1, time1), + validIntHistogram: getIntHistogramMetric(validIntHistogram, lbs1, time1, floatVal1, uint64(intVal1), bounds, buckets), + validHistogram: getHistogramMetric(validHistogram, lbs1, time1, floatVal1, uint64(intVal1), bounds, buckets), + validSummary: getSummaryMetric(validSummary, lbs1, time1, floatVal1, uint64(intVal1), quantiles), } - validMetrics2 = map[string]*otlp.Metric{ - validIntGauge: { - Name: validIntGauge, - Data: &otlp.Metric_IntGauge{ - IntGauge: 
&otlp.IntGauge{
-					DataPoints: []*otlp.IntDataPoint{
-						getIntDataPoint(lbs2, intVal2, time2),
-					},
-				},
-			},
-		},
-		validDoubleGauge: {
-			Name: validDoubleGauge,
-			Data: &otlp.Metric_DoubleGauge{
-				DoubleGauge: &otlp.DoubleGauge{
-					DataPoints: []*otlp.DoubleDataPoint{
-						getDoubleDataPoint(lbs2, floatVal2, time2),
-					},
-				},
-			},
-		},
-		validIntSum: {
-			Name: validIntSum,
-			Data: &otlp.Metric_IntSum{
-				IntSum: &otlp.IntSum{
-					DataPoints: []*otlp.IntDataPoint{
-						getIntDataPoint(lbs2, intVal2, time2),
-					},
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		validDoubleSum: {
-			Name: validDoubleSum,
-			Data: &otlp.Metric_DoubleSum{
-				DoubleSum: &otlp.DoubleSum{
-					DataPoints: []*otlp.DoubleDataPoint{
-						getDoubleDataPoint(lbs2, floatVal2, time2),
-					},
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		validIntHistogram: {
-			Name: validIntHistogram,
-			Data: &otlp.Metric_IntHistogram{
-				IntHistogram: &otlp.IntHistogram{
-					DataPoints: []*otlp.IntHistogramDataPoint{
-						getIntHistogramDataPoint(lbs2, time2, floatVal2, uint64(intVal2), bounds, buckets),
-					},
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		validDoubleHistogram: {
-			Name: validDoubleHistogram,
-			Data: &otlp.Metric_DoubleHistogram{
-				DoubleHistogram: &otlp.DoubleHistogram{
-					DataPoints: []*otlp.DoubleHistogramDataPoint{
-						getDoubleHistogramDataPoint(lbs2, time2, floatVal2, uint64(intVal2), bounds, buckets),
-					},
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		validDoubleSummary: {
-			Name: validDoubleSummary,
-			Data: &otlp.Metric_DoubleSummary{
-				DoubleSummary: &otlp.DoubleSummary{
-					DataPoints: []*otlp.DoubleSummaryDataPoint{
-						getDoubleSummaryDataPoint(lbs2, time2, floatVal2, uint64(intVal2), quantiles),
-						nil,
-					},
-				},
-			},
-		},
-		validIntGaugeDirty: {
-			Name: validIntGaugeDirty,
-			Data: &otlp.Metric_IntGauge{
-				IntGauge: &otlp.IntGauge{
-					DataPoints: []*otlp.IntDataPoint{
-						getIntDataPoint(lbs1, intVal1, time1),
-						nil,
-					},
-				},
-			},
-		},
-		unmatchedBoundBucketIntHist: {
-			Name: unmatchedBoundBucketIntHist,
-			Data: &otlp.Metric_IntHistogram{
-				IntHistogram: &otlp.IntHistogram{
-					DataPoints: []*otlp.IntHistogramDataPoint{
-						{
-							ExplicitBounds: []float64{0.1, 0.2, 0.3},
-							BucketCounts:   []uint64{1, 2},
-						},
-					},
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		unmatchedBoundBucketDoubleHist: {
-			Name: unmatchedBoundBucketDoubleHist,
-			Data: &otlp.Metric_DoubleHistogram{
-				DoubleHistogram: &otlp.DoubleHistogram{
-					DataPoints: []*otlp.DoubleHistogramDataPoint{
-						{
-							ExplicitBounds: []float64{0.1, 0.2, 0.3},
-							BucketCounts:   []uint64{1, 2},
-						},
-					},
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
+	validMetrics2 = map[string]pdata.Metric{
+		validIntGauge:               getIntGaugeMetric(validIntGauge, lbs2, intVal2, time2),
+		validDoubleGauge:            getDoubleGaugeMetric(validDoubleGauge, lbs2, floatVal2, time2),
+		validIntSum:                 getIntSumMetric(validIntSum, lbs2, intVal2, time2),
+		validDoubleSum:              getDoubleSumMetric(validDoubleSum, lbs2, floatVal2, time2),
+		validIntHistogram:           getIntHistogramMetric(validIntHistogram, lbs2, time2, floatVal2, uint64(intVal2), bounds, buckets),
+		validHistogram:              getHistogramMetric(validHistogram, lbs2, time2, floatVal2, uint64(intVal2), bounds, buckets),
+		validSummary:                getSummaryMetric(validSummary, lbs2, time2, floatVal2, uint64(intVal2), quantiles),
+		validIntGaugeDirty:          getIntGaugeMetric(validIntGaugeDirty, lbs1, intVal1, time1),
+		unmatchedBoundBucketIntHist: getIntHistogramMetric(unmatchedBoundBucketIntHist, pdata.NewStringMap(), 0, 0, 0, []float64{0.1, 0.2, 0.3}, []uint64{1, 2}),
+		unmatchedBoundBucketHist:    getHistogramMetric(unmatchedBoundBucketHist, pdata.NewStringMap(), 0, 0, 0, []float64{0.1, 0.2, 0.3}, []uint64{1, 2}),
 	}
 
-	nilMetric = "nil"
-	empty     = "empty"
+	empty = "empty"
 
 	// Category 1: type and data field doesn't match
-	notMatchIntGauge        = "noMatchIntGauge"
-	notMatchDoubleGauge     = "notMatchDoubleGauge"
-	notMatchIntSum          = "notMatchIntSum"
-	notMatchDoubleSum       = "notMatchDoubleSum"
-	notMatchIntHistogram    = "notMatchIntHistogram"
-	notMatchDoubleHistogram = "notMatchDoubleHistogram"
-	notMatchDoubleSummary   = "notMatchDoubleSummary"
+	emptyIntGauge     = "emptyIntGauge"
+	emptyDoubleGauge  = "emptyDoubleGauge"
+	emptyIntSum       = "emptyIntSum"
+	emptyDoubleSum    = "emptyDoubleSum"
+	emptyIntHistogram = "emptyIntHistogram"
+	emptyHistogram    = "emptyHistogram"
+	emptySummary      = "emptySummary"
 
 	// Category 2: invalid type and temporality combination
-	invalidIntSum          = "invalidIntSum"
-	invalidDoubleSum       = "invalidDoubleSum"
-	invalidIntHistogram    = "invalidIntHistogram"
-	invalidDoubleHistogram = "invalidDoubleHistogram"
-
-	// Category 3: nil data points
-	nilDataPointIntGauge        = "nilDataPointIntGauge"
-	nilDataPointDoubleGauge     = "nilDataPointDoubleGauge"
-	nilDataPointIntSum          = "nilDataPointIntSum"
-	nilDataPointDoubleSum       = "nilDataPointDoubleSum"
-	nilDataPointIntHistogram    = "nilDataPointIntHistogram"
-	nilDataPointDoubleHistogram = "nilDataPointDoubleHistogram"
-	nilDataPointDoubleSummary   = "nilDataPointDoubleSummary"
-
-	// different metrics that will not pass validate metrics
-	invalidMetrics = map[string]*otlp.Metric{
-		// nil
-		nilMetric: nil,
-		// Data = nil
-		empty: {},
-		notMatchIntGauge: {
-			Name: notMatchIntGauge,
-			Data: &otlp.Metric_IntGauge{},
-		},
-		notMatchDoubleGauge: {
-			Name: notMatchDoubleGauge,
-			Data: &otlp.Metric_DoubleGauge{},
-		},
-		notMatchIntSum: {
-			Name: notMatchIntSum,
-			Data: &otlp.Metric_IntSum{},
-		},
-		notMatchDoubleSum: {
-			Name: notMatchDoubleSum,
-			Data: &otlp.Metric_DoubleSum{},
-		},
-		notMatchIntHistogram: {
-			Name: notMatchIntHistogram,
-			Data: &otlp.Metric_IntHistogram{},
-		},
-		notMatchDoubleHistogram: {
-			Name: notMatchDoubleHistogram,
-			Data: &otlp.Metric_DoubleHistogram{},
-		},
-		notMatchDoubleSummary: {
-			Name: notMatchDoubleSummary,
-			Data: &otlp.Metric_DoubleSummary{},
-		},
-		invalidIntSum: {
-			Name: invalidIntSum,
-			Data: &otlp.Metric_IntSum{
-				IntSum: &otlp.IntSum{
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_DELTA,
-				},
-			},
-		},
-		invalidDoubleSum: {
-			Name: invalidDoubleSum,
-			Data: &otlp.Metric_DoubleSum{
-				DoubleSum: &otlp.DoubleSum{
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_DELTA,
-				},
-			},
-		},
-		invalidIntHistogram: {
-			Name: invalidIntHistogram,
-			Data: &otlp.Metric_IntHistogram{
-				IntHistogram: &otlp.IntHistogram{
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_DELTA,
-				},
-			},
-		},
-		invalidDoubleHistogram: {
-			Name: invalidDoubleHistogram,
-			Data: &otlp.Metric_DoubleHistogram{
-				DoubleHistogram: &otlp.DoubleHistogram{
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_DELTA,
-				},
-			},
-		},
-	}
-
-	// different metrics that will cause the exporter to return an error
-	errorMetrics = map[string]*otlp.Metric{
-
-		nilDataPointIntGauge: {
-			Name: nilDataPointIntGauge,
-			Data: &otlp.Metric_IntGauge{
-				IntGauge: &otlp.IntGauge{DataPoints: nil},
-			},
-		},
-		nilDataPointDoubleGauge: {
-			Name: nilDataPointDoubleGauge,
-			Data: &otlp.Metric_DoubleGauge{
-				DoubleGauge: &otlp.DoubleGauge{DataPoints: nil},
-			},
-		},
-		nilDataPointIntSum: {
-			Name: nilDataPointIntSum,
-			Data: &otlp.Metric_IntSum{
-				IntSum: &otlp.IntSum{
-					DataPoints:             nil,
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		nilDataPointDoubleSum: {
-			Name: nilDataPointDoubleSum,
-			Data: &otlp.Metric_DoubleSum{
-				DoubleSum: &otlp.DoubleSum{
-					DataPoints:             nil,
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		nilDataPointIntHistogram: {
-			Name: nilDataPointIntHistogram,
-			Data: &otlp.Metric_IntHistogram{
-				IntHistogram: &otlp.IntHistogram{
-					DataPoints:             nil,
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		nilDataPointDoubleHistogram: {
-			Name: nilDataPointDoubleHistogram,
-			Data: &otlp.Metric_DoubleHistogram{
-				DoubleHistogram: &otlp.DoubleHistogram{
-					DataPoints:             nil,
-					AggregationTemporality: otlp.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
-				},
-			},
-		},
-		nilDataPointDoubleSummary: {
-			Name: nilDataPointDoubleSummary,
-			Data: &otlp.Metric_DoubleSummary{
-				DoubleSummary: &otlp.DoubleSummary{
-					DataPoints: nil,
-				},
-			},
-		},
+	emptyCumulativeIntSum       = "emptyCumulativeIntSum"
+	emptyCumulativeDoubleSum    = "emptyCumulativeDoubleSum"
+	emptyCumulativeIntHistogram = "emptyCumulativeIntHistogram"
+	emptyCumulativeHistogram    = "emptyCumulativeHistogram"
+
+	// different metrics that will not pass validate metrics and will cause the exporter to return an error
+	invalidMetrics = map[string]pdata.Metric{
+		empty:                       pdata.NewMetric(),
+		emptyIntGauge:               getEmptyIntGaugeMetric(emptyIntGauge),
+		emptyDoubleGauge:            getEmptyDoubleGaugeMetric(emptyDoubleGauge),
+		emptyIntSum:                 getEmptyIntSumMetric(emptyIntSum),
+		emptyDoubleSum:              getEmptyDoubleSumMetric(emptyDoubleSum),
+		emptyIntHistogram:           getEmptyIntHistogramMetric(emptyIntHistogram),
+		emptyHistogram:              getEmptyHistogramMetric(emptyHistogram),
+		emptySummary:                getEmptySummaryMetric(emptySummary),
+		emptyCumulativeIntSum:       getEmptyCumulativeIntSumMetric(emptyCumulativeIntSum),
+		emptyCumulativeDoubleSum:    getEmptyCumulativeDoubleSumMetric(emptyCumulativeDoubleSum),
+		emptyCumulativeIntHistogram: getEmptyCumulativeIntHistogramMetric(emptyCumulativeIntHistogram),
+		emptyCumulativeHistogram:    getEmptyCumulativeHistogramMetric(emptyCumulativeHistogram),
 	}
 )
 
 // OTLP metrics
 // labels must come in pairs
-func getLabels(labels ...string) []commonpb.StringKeyValue {
-	var set []commonpb.StringKeyValue
+func getLabels(labels ...string) pdata.StringMap {
+	stringMap := pdata.NewStringMap()
 	for i := 0; i < len(labels); i += 2 {
-		set = append(set, commonpb.StringKeyValue{
-			Key:   labels[i],
-			Value: labels[i+1],
-		})
-	}
-	return set
-}
-
-func getIntDataPoint(labels []commonpb.StringKeyValue, value int64, ts uint64) *otlp.IntDataPoint {
-	return &otlp.IntDataPoint{
-		Labels:            labels,
-		StartTimeUnixNano: 0,
-		TimeUnixNano:      ts,
-		Value:             value,
-	}
-}
-
-func getDoubleDataPoint(labels []commonpb.StringKeyValue, value float64, ts uint64) *otlp.DoubleDataPoint {
-	return &otlp.DoubleDataPoint{
-		Labels:            labels,
-		StartTimeUnixNano: 0,
-		TimeUnixNano:      ts,
-		Value:             value,
-	}
-}
-
-func getIntHistogramDataPoint(labels []commonpb.StringKeyValue, ts uint64, sum float64, count uint64, bounds []float64,
-	buckets []uint64) *otlp.IntHistogramDataPoint {
-	return &otlp.IntHistogramDataPoint{
-		Labels:            labels,
-		StartTimeUnixNano: 0,
-		TimeUnixNano:      ts,
-		Count:             count,
-		Sum:               int64(sum),
-		BucketCounts:      buckets,
-		ExplicitBounds:    bounds,
-		Exemplars:         nil,
-	}
-}
-
-func getDoubleHistogramDataPoint(labels []commonpb.StringKeyValue, ts uint64, sum float64, count uint64,
-	bounds []float64, buckets []uint64) *otlp.DoubleHistogramDataPoint {
-	return &otlp.DoubleHistogramDataPoint{
-		Labels:         labels,
-		TimeUnixNano:   ts,
-		Count:          count,
-		Sum:            sum,
-		BucketCounts:   buckets,
-		ExplicitBounds: bounds,
-	}
-}
-
-func getDoubleSummaryDataPoint(labels []commonpb.StringKeyValue, ts uint64, sum float64, count uint64,
-	quantiles []*otlp.DoubleSummaryDataPoint_ValueAtQuantile) *otlp.DoubleSummaryDataPoint {
-	return &otlp.DoubleSummaryDataPoint{
-		Labels:         labels,
-		TimeUnixNano:   ts,
-		Count:          count,
-		Sum:            sum,
-		QuantileValues: quantiles,
+		stringMap.Upsert(labels[i], labels[i+1])
 	}
+	return stringMap
 }
 
 // Prometheus TimeSeries
@@ -571,14 +200,16 @@ func getTimeSeries(labels []prompb.Label, samples ...prompb.Sample) *prompb.Time
 	}
 }
 
-func getQuantiles(bounds []float64, values []float64) []*otlp.DoubleSummaryDataPoint_ValueAtQuantile {
-	quantiles := make([]*otlp.DoubleSummaryDataPoint_ValueAtQuantile, len(bounds))
+func getQuantiles(bounds []float64, values []float64) pdata.ValueAtQuantileSlice {
+	quantiles := pdata.NewValueAtQuantileSlice()
+	quantiles.Resize(len(bounds))
+
 	for i := 0; i < len(bounds); i++ {
-		quantiles[i] = &otlp.DoubleSummaryDataPoint_ValueAtQuantile{
-			Quantile: bounds[i],
-			Value:    values[i],
-		}
+		quantile := quantiles.At(i)
+		quantile.SetQuantile(bounds[i])
+		quantile.SetValue(values[i])
 	}
+
 	return quantiles
 }
 
@@ -589,3 +220,238 @@ func getTimeseriesMap(timeseries []*prompb.TimeSeries) map[string]*prompb.TimeSe
 	}
 	return tsMap
 }
+
+func getEmptyIntGaugeMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeIntGauge)
+	return metric
+}
+
+func getIntGaugeMetric(name string, labels pdata.StringMap, value int64, ts uint64) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeIntGauge)
+	dp := metric.IntGauge().DataPoints().AppendEmpty()
+	dp.SetValue(value)
+
+	labels.Range(func(k string, v string) bool {
+		dp.LabelsMap().Upsert(k, v)
+		return true
+	})
+
+	dp.SetStartTimestamp(pdata.Timestamp(0))
+	dp.SetTimestamp(pdata.Timestamp(ts))
+	return metric
+}
+
+func getEmptyDoubleGaugeMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeDoubleGauge)
+	return metric
+}
+
+func getDoubleGaugeMetric(name string, labels pdata.StringMap, value float64, ts uint64) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeDoubleGauge)
+	dp := metric.DoubleGauge().DataPoints().AppendEmpty()
+	dp.SetValue(value)
+
+	labels.Range(func(k string, v string) bool {
+		dp.LabelsMap().Upsert(k, v)
+		return true
+	})
+
+	dp.SetStartTimestamp(pdata.Timestamp(0))
+	dp.SetTimestamp(pdata.Timestamp(ts))
+	return metric
+}
+
+func getEmptyIntSumMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeIntSum)
+	return metric
+}
+
+func getEmptyCumulativeIntSumMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeIntSum)
+	metric.IntSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+	return metric
+}
+
+func getIntSumMetric(name string, labels pdata.StringMap, value int64, ts uint64) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeIntSum)
+	metric.IntSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+	dp := metric.IntSum().DataPoints().AppendEmpty()
+	dp.SetValue(value)
+
+	labels.Range(func(k string, v string) bool {
+		dp.LabelsMap().Upsert(k, v)
+		return true
+	})
+
+	dp.SetStartTimestamp(pdata.Timestamp(0))
+	dp.SetTimestamp(pdata.Timestamp(ts))
+	return metric
+}
+
+func getEmptyDoubleSumMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeDoubleSum)
+	return metric
+}
+
+func getEmptyCumulativeDoubleSumMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeDoubleSum)
+	metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+	return metric
+}
+
+func getDoubleSumMetric(name string, labels pdata.StringMap, value float64, ts uint64) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeDoubleSum)
+	metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+	dp := metric.DoubleSum().DataPoints().AppendEmpty()
+	dp.SetValue(value)
+
+	labels.Range(func(k string, v string) bool {
+		dp.LabelsMap().Upsert(k, v)
+		return true
+	})
+
+	dp.SetStartTimestamp(pdata.Timestamp(0))
+	dp.SetTimestamp(pdata.Timestamp(ts))
+	return metric
+}
+
+func getEmptyIntHistogramMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeIntHistogram)
+	return metric
+}
+
+func getEmptyCumulativeIntHistogramMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeIntHistogram)
+	metric.IntHistogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+	return metric
+}
+
+func getIntHistogramMetric(name string, labels pdata.StringMap, ts uint64, sum float64, count uint64, bounds []float64, buckets []uint64) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeIntHistogram)
+	metric.IntHistogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+	dp := metric.IntHistogram().DataPoints().AppendEmpty()
+	dp.SetCount(count)
+	dp.SetSum(int64(sum))
+	dp.SetBucketCounts(buckets)
+	dp.SetExplicitBounds(bounds)
+
+	labels.Range(func(k string, v string) bool {
+		dp.LabelsMap().Upsert(k, v)
+		return true
+	})
+
+	dp.SetStartTimestamp(pdata.Timestamp(0))
+	dp.SetTimestamp(pdata.Timestamp(ts))
+	return metric
+}
+
+func getEmptyHistogramMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeHistogram)
+	return metric
+}
+
+func getEmptyCumulativeHistogramMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeHistogram)
+	metric.Histogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+	return metric
+}
+
+func getHistogramMetric(name string, labels pdata.StringMap, ts uint64, sum float64, count uint64, bounds []float64, buckets []uint64) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeHistogram)
+	metric.Histogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+	dp := metric.Histogram().DataPoints().AppendEmpty()
+	dp.SetCount(count)
+	dp.SetSum(sum)
+	dp.SetBucketCounts(buckets)
+	dp.SetExplicitBounds(bounds)
+
+	labels.Range(func(k string, v string) bool {
+		dp.LabelsMap().Upsert(k, v)
+		return true
+	})
+
+	dp.SetTimestamp(pdata.Timestamp(ts))
+	return metric
+}
+
+func getEmptySummaryMetric(name string) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeSummary)
+	return metric
+}
+
+func getSummaryMetric(name string, labels pdata.StringMap, ts uint64, sum float64, count uint64, quantiles pdata.ValueAtQuantileSlice) pdata.Metric {
+	metric := pdata.NewMetric()
+	metric.SetName(name)
+	metric.SetDataType(pdata.MetricDataTypeSummary)
+	dp := metric.Summary().DataPoints().AppendEmpty()
+	dp.SetCount(count)
+	dp.SetSum(sum)
+
+	labels.Range(func(k string, v string) bool {
+		dp.LabelsMap().Upsert(k, v)
+		return true
+	})
+
+	dp.SetTimestamp(pdata.Timestamp(ts))
+
+	quantiles.CopyTo(dp.QuantileValues())
+
+	return metric
+}
+
+func getResource(resources ...string) pdata.Resource {
+	resource := pdata.NewResource()
+
+	for i := 0; i < len(resources); i += 2 {
+		resource.Attributes().Upsert(resources[i], pdata.NewAttributeValueString(resources[i+1]))
+	}
+
+	return resource
+}
+
+func getMetricsFromMetricList(metricList ...pdata.Metric) pdata.Metrics {
+	metrics := pdata.NewMetrics()
+
+	rm := metrics.ResourceMetrics().AppendEmpty()
+	ilm := rm.InstrumentationLibraryMetrics().AppendEmpty()
+	ilm.Metrics().Resize(len(metricList))
+	for i := 0; i < len(metricList); i++ {
+		metricList[i].CopyTo(ilm.Metrics().At(i))
+	}
+
+	return metrics
+}
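
For readers unfamiliar with the pdata API that these fixtures migrate to, below is a minimal, self-contained sketch of the construction pattern the new helpers follow. It is illustrative only; the metric name, label, and values are invented and it is not part of the patch itself:

package main

import (
	"fmt"

	"go.opentelemetry.io/collector/consumer/pdata"
)

func main() {
	// Same shape the new helpers build: Metrics -> ResourceMetrics ->
	// InstrumentationLibraryMetrics -> Metric, all appended in place.
	metrics := pdata.NewMetrics()
	rm := metrics.ResourceMetrics().AppendEmpty()
	ilm := rm.InstrumentationLibraryMetrics().AppendEmpty()

	metric := ilm.Metrics().AppendEmpty()
	metric.SetName("example_gauge") // invented name, for illustration only
	metric.SetDataType(pdata.MetricDataTypeIntGauge)

	// Data points are appended and then mutated through setters,
	// exactly as getIntGaugeMetric does above.
	dp := metric.IntGauge().DataPoints().AppendEmpty()
	dp.SetValue(42)
	dp.SetTimestamp(pdata.Timestamp(1620000000000000000)) // Unix nanos
	dp.LabelsMap().Upsert("label_name", "label_value")

	fmt.Println(metrics.ResourceMetrics().Len()) // 1
}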