
[8.13](backport #39204) Fix Azure Monitor support for multiple aggregation types #39279

Merged 2 commits on Apr 30, 2024
1 change: 1 addition & 0 deletions CHANGELOG.next.asciidoc
@@ -86,6 +86,7 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]

*Metricbeat*

- Fix Azure Monitor support for multiple aggregation types {issue}39192[39192] {pull}39204[39204]

*Osquerybeat*

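For context on the Metricbeat changelog entry above: the fix lets a single metric definition request several aggregation types at once and emit each of them as its own field. A minimal sketch of such a definition, using the module's Metric type exactly as the tests below exercise it (the YAML-level module configuration is not shown in this PR):

// Sketch based on the test fixtures in client_test.go below: one metric
// definition asking Azure Monitor for three aggregations of the same metric.
metric := Metric{
    Namespace:    "Microsoft.EventHub/Namespaces",
    Names:        []string{"ActiveConnections"},
    Aggregations: "Maximum,Minimum,Average", // comma-separated list of aggregation types
    TimeGrain:    "PT1M",
}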
39 changes: 39 additions & 0 deletions x-pack/metricbeat/module/azure/azure_test.go
@@ -0,0 +1,39 @@
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License;
// you may not use this file except in compliance with the Elastic License.

package azure

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestGroupMetricsDefinitionsByResourceId(t *testing.T) {

    t.Run("Group metrics definitions by resource ID", func(t *testing.T) {
        metrics := []Metric{
            {
                ResourceId: "resource-1",
                Namespace:  "namespace-1",
                Names:      []string{"metric-1"},
            },
            {
                ResourceId: "resource-1",
                Namespace:  "namespace-1",
                Names:      []string{"metric-2"},
            },
            {
                ResourceId: "resource-1",
                Namespace:  "namespace-1",
                Names:      []string{"metric-3"},
            },
        }

        metricsByResourceId := groupMetricsDefinitionsByResourceId(metrics)

        assert.Equal(t, 1, len(metricsByResourceId))
        assert.Equal(t, 3, len(metricsByResourceId["resource-1"]))
    })
}
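The grouping helper exercised above is not part of this diff. A hedged sketch of an implementation with the behaviour the assertions describe (a map keyed by resource ID, one slice of definitions per resource); the actual function in the azure module may differ:

// Sketch only, inferred from the test above: group metric definitions by
// their resource ID so that definitions for the same resource stay together.
func groupMetricsDefinitionsByResourceId(metrics []Metric) map[string][]Metric {
    grouped := make(map[string][]Metric)
    for _, metric := range metrics {
        grouped[metric.ResourceId] = append(grouped[metric.ResourceId], metric)
    }
    return grouped
}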
156 changes: 156 additions & 0 deletions x-pack/metricbeat/module/azure/client_test.go
@@ -9,10 +9,12 @@ import (
    "testing"
    "time"

    "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
    "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/monitor/armmonitor"
    "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/mock"
    "github.com/stretchr/testify/require"
)

var (
@@ -35,6 +37,7 @@ var (
            },
        }}},
    }
    countUnit = armmonitor.MetricUnit("Count")
)

func mockMapResourceMetrics(client *Client, resources []*armresources.GenericResourceExpanded, resourceConfig ResourceConfig) ([]Metric, error) {
@@ -112,4 +115,157 @@ func TestGetMetricValues(t *testing.T) {
        assert.Equal(t, len(client.ResourceConfigurations.Metrics[0].Values), 0)
        m.AssertExpectations(t)
    })

t.Run("multiple aggregation types", func(t *testing.T) {
client := NewMockClient()
referenceTime := time.Now().UTC()
client.ResourceConfigurations = ResourceConfiguration{
Metrics: []Metric{
{
Namespace: "Microsoft.EventHub/Namespaces",
Names: []string{"ActiveConnections"},
Aggregations: "Maximum,Minimum,Average",
TimeGrain: "PT1M",
},
},
}

m := &MockService{}
m.On(
"GetMetricValues",
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
).Return(
[]armmonitor.Metric{{
ID: to.Ptr("test"),
Name: &armmonitor.LocalizableString{
Value: to.Ptr("ActiveConnections"),
LocalizedValue: to.Ptr("ActiveConnections"),
},
Timeseries: []*armmonitor.TimeSeriesElement{{
Data: []*armmonitor.MetricValue{{
Average: to.Ptr(1.0),
Maximum: to.Ptr(2.0),
Minimum: to.Ptr(3.0),
TimeStamp: to.Ptr(time.Now()),
}},
}},
Type: to.Ptr("Microsoft.Insights/metrics"),
Unit: &countUnit,
DisplayDescription: to.Ptr("Total Active Connections for Microsoft.EventHub."),
ErrorCode: to.Ptr("Success"),
}},
"PT1M",
nil,
)

client.AzureMonitorService = m
mr := MockReporterV2{}

metricValues := client.GetMetricValues(referenceTime, client.ResourceConfigurations.Metrics, &mr)

require.Equal(t, len(metricValues), 1)
require.Equal(t, len(metricValues[0].Values), 1)

assert.Equal(t, *metricValues[0].Values[0].avg, 1.0)
assert.Equal(t, *metricValues[0].Values[0].max, 2.0)
assert.Equal(t, *metricValues[0].Values[0].min, 3.0)

require.Equal(t, len(client.ResourceConfigurations.Metrics[0].Values), 1)

m.AssertExpectations(t)
})

t.Run("single aggregation types", func(t *testing.T) {
client := NewMockClient()
referenceTime := time.Now().UTC()
timestamp := time.Now().UTC()
client.ResourceConfigurations = ResourceConfiguration{
Metrics: []Metric{
{
Namespace: "Microsoft.EventHub/Namespaces",
Names: []string{"ActiveConnections"},
Aggregations: "Maximum",
TimeGrain: "PT1M",
}, {
Namespace: "Microsoft.EventHub/Namespaces",
Names: []string{"ActiveConnections"},
Aggregations: "Minimum",
TimeGrain: "PT1M",
}, {
Namespace: "Microsoft.EventHub/Namespaces",
Names: []string{"ActiveConnections"},
Aggregations: "Average",
TimeGrain: "PT1M",
},
},
}

m := &MockService{}

x := []struct {
aggregation string
data []*armmonitor.MetricValue
}{
{aggregation: "Maximum", data: []*armmonitor.MetricValue{{Maximum: to.Ptr(3.0), TimeStamp: to.Ptr(timestamp)}}},
{aggregation: "Minimum", data: []*armmonitor.MetricValue{{Minimum: to.Ptr(1.0), TimeStamp: to.Ptr(timestamp)}}},
{aggregation: "Average", data: []*armmonitor.MetricValue{{Average: to.Ptr(2.0), TimeStamp: to.Ptr(timestamp)}}},
}

for _, v := range x {
m.On(
"GetMetricValues",
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
mock.Anything,
v.aggregation,
mock.Anything,
).Return(
[]armmonitor.Metric{{
ID: to.Ptr("test"),
Name: &armmonitor.LocalizableString{
Value: to.Ptr("ActiveConnections"),
LocalizedValue: to.Ptr("ActiveConnections"),
},
Timeseries: []*armmonitor.TimeSeriesElement{{
Data: v.data,
}},
Type: to.Ptr("Microsoft.Insights/metrics"),
Unit: &countUnit,
DisplayDescription: to.Ptr("Total Active Connections for Microsoft.EventHub."),
ErrorCode: to.Ptr("Success"),
}},
"PT1M",
nil,
).Once()
}

client.AzureMonitorService = m
mr := MockReporterV2{}

metricValues := client.GetMetricValues(referenceTime, client.ResourceConfigurations.Metrics, &mr)

require.Equal(t, 3, len(metricValues))

require.Equal(t, 1, len(metricValues[0].Values))
require.Equal(t, 1, len(metricValues[1].Values))
require.Equal(t, 1, len(metricValues[2].Values))

require.NotNil(t, metricValues[0].Values[0].max, "max value is nil")
require.NotNil(t, metricValues[1].Values[0].min, "min value is nil")
require.NotNil(t, metricValues[2].Values[0].avg, "avg value is nil")

assert.Equal(t, *metricValues[0].Values[0].max, 3.0)
assert.Equal(t, *metricValues[1].Values[0].min, 1.0)
assert.Equal(t, *metricValues[2].Values[0].avg, 2.0)

m.AssertExpectations(t)
})
}
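Taken together, the two new subtests pin down the fixed behaviour: a single metric definition with Aggregations: "Maximum,Minimum,Average" yields one value whose avg, max, and min fields are all populated, while three definitions with one aggregation each still produce three independent results. A small, self-contained sketch of the same idea (the aggregatedValue type and keepAllAggregations helper are hypothetical stand-ins, not the module's code):

package main

import (
    "fmt"
    "time"

    "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
    "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/monitor/armmonitor"
)

// aggregatedValue stands in for the module's internal value type, which the
// tests above read through its avg/max/min pointer fields.
type aggregatedValue struct {
    avg, max, min *float64
    timestamp     time.Time
}

// keepAllAggregations mirrors what the tests assert: every non-nil aggregation
// on an Azure Monitor data point is kept, not just the first one found.
func keepAllAggregations(v *armmonitor.MetricValue) aggregatedValue {
    return aggregatedValue{
        avg:       v.Average,
        max:       v.Maximum,
        min:       v.Minimum,
        timestamp: *v.TimeStamp,
    }
}

func main() {
    v := &armmonitor.MetricValue{
        Average:   to.Ptr(1.0),
        Maximum:   to.Ptr(2.0),
        Minimum:   to.Ptr(3.0),
        TimeStamp: to.Ptr(time.Now()),
    }
    out := keepAllAggregations(v)
    fmt.Println(*out.avg, *out.max, *out.min) // 1 2 3, matching the "multiple aggregation types" subtest
}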
103 changes: 67 additions & 36 deletions x-pack/metricbeat/module/azure/data.go
@@ -133,41 +133,8 @@ func mapToKeyValuePoints(metrics []Metric) []KeyValuePoint {
    var points []KeyValuePoint
    for _, metric := range metrics {
        for _, value := range metric.Values {
            point := KeyValuePoint{
                Timestamp:  value.timestamp,
                Dimensions: mapstr.M{},
            }

            metricName := managePropertyName(value.name)
            switch {
            case value.min != nil:
                point.Key = fmt.Sprintf("%s.%s", metricName, "min")
                point.Value = value.min
            case value.max != nil:
                point.Key = fmt.Sprintf("%s.%s", metricName, "max")
                point.Value = value.max
            case value.avg != nil:
                point.Key = fmt.Sprintf("%s.%s", metricName, "avg")
                point.Value = value.avg
            case value.total != nil:
                point.Key = fmt.Sprintf("%s.%s", metricName, "total")
                point.Value = value.total
            case value.count != nil:
                point.Key = fmt.Sprintf("%s.%s", metricName, "count")
                point.Value = value.count
            }

            point.Namespace = metric.Namespace
            point.ResourceId = metric.ResourceId
            point.ResourceSubId = metric.ResourceSubId
            point.TimeGrain = metric.TimeGrain

            // The number of dimensions in the metric definition and the
            // number of dimensions in the metric values should be the same.
            //
            // But, since definitions and values are retrieved from different
            // API endpoints, we need to make sure that we don't panic if the
            // number of dimensions is different.
            dimensions := mapstr.M{}
            if len(metric.Dimensions) == len(value.dimensions) {
                // Take the dimension name from the metric definition and the
                // dimension value from the metric value.
@@ -180,11 +147,75 @@ func mapToKeyValuePoints(metrics []Metric) []KeyValuePoint {
                    // Dimensions from metric definition and metric value are
                    // not guaranteed to be in the same order, so we need to
                    // find by name the right value for each dimension.
                    _, _ = point.Dimensions.Put(dim.Name, getDimensionValue(dim.Name, value.dimensions))
                    // _, _ = point.Dimensions.Put(dim.Name, getDimensionValue(dim.Name, value.dimensions))
                    _, _ = dimensions.Put(dim.Name, getDimensionValue(dim.Name, value.dimensions))
                }
            }

            points = append(points, point)
            if value.min != nil {
                points = append(points, KeyValuePoint{
                    Key:           fmt.Sprintf("%s.%s", metricName, "min"),
                    Value:         value.min,
                    Namespace:     metric.Namespace,
                    ResourceId:    metric.ResourceId,
                    ResourceSubId: metric.ResourceSubId,
                    TimeGrain:     metric.TimeGrain,
                    Dimensions:    dimensions,
                    Timestamp:     value.timestamp,
                })
            }

            if value.max != nil {
                points = append(points, KeyValuePoint{
                    Key:           fmt.Sprintf("%s.%s", metricName, "max"),
                    Value:         value.max,
                    Namespace:     metric.Namespace,
                    ResourceId:    metric.ResourceId,
                    ResourceSubId: metric.ResourceSubId,
                    TimeGrain:     metric.TimeGrain,
                    Dimensions:    dimensions,
                    Timestamp:     value.timestamp,
                })
            }

            if value.avg != nil {
                points = append(points, KeyValuePoint{
                    Key:           fmt.Sprintf("%s.%s", metricName, "avg"),
                    Value:         value.avg,
                    Namespace:     metric.Namespace,
                    ResourceId:    metric.ResourceId,
                    ResourceSubId: metric.ResourceSubId,
                    TimeGrain:     metric.TimeGrain,
                    Dimensions:    dimensions,
                    Timestamp:     value.timestamp,
                })
            }

            if value.total != nil {
                points = append(points, KeyValuePoint{
                    Key:           fmt.Sprintf("%s.%s", metricName, "total"),
                    Value:         value.total,
                    Namespace:     metric.Namespace,
                    ResourceId:    metric.ResourceId,
                    ResourceSubId: metric.ResourceSubId,
                    TimeGrain:     metric.TimeGrain,
                    Dimensions:    dimensions,
                    Timestamp:     value.timestamp,
                })
            }

            if value.count != nil {
                points = append(points, KeyValuePoint{
                    Key:           fmt.Sprintf("%s.%s", metricName, "count"),
                    Value:         value.count,
                    Namespace:     metric.Namespace,
                    ResourceId:    metric.ResourceId,
                    ResourceSubId: metric.ResourceSubId,
                    TimeGrain:     metric.TimeGrain,
                    Dimensions:    dimensions,
                    Timestamp:     value.timestamp,
                })
            }
        }
    }

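The five branches above differ only in the key suffix and the aggregation pointer. A possible follow-up, not part of this PR and only a sketch assuming the aggregation fields are *float64 (the tests dereference them directly), would be a small helper in the same package that factors out the repetition while keeping the one-point-per-aggregation behaviour; appendIfSet and base are hypothetical names:

// appendIfSet is a hypothetical helper: it appends a copy of base with the
// given key suffix and value, but only when that aggregation is present.
func appendIfSet(points []KeyValuePoint, base KeyValuePoint, metricName, suffix string, v *float64) []KeyValuePoint {
    if v == nil {
        return points
    }
    base.Key = fmt.Sprintf("%s.%s", metricName, suffix)
    base.Value = v
    return append(points, base)
}

// Usage inside the loop above (sketch): base carries the fields shared by
// every aggregation emitted for this value.
base := KeyValuePoint{
    Namespace:     metric.Namespace,
    ResourceId:    metric.ResourceId,
    ResourceSubId: metric.ResourceSubId,
    TimeGrain:     metric.TimeGrain,
    Dimensions:    dimensions,
    Timestamp:     value.timestamp,
}
points = appendIfSet(points, base, metricName, "min", value.min)
points = appendIfSet(points, base, metricName, "max", value.max)
points = appendIfSet(points, base, metricName, "avg", value.avg)
points = appendIfSet(points, base, metricName, "total", value.total)
points = appendIfSet(points, base, metricName, "count", value.count)

Copying base by value keeps the shared fields in one place; the Dimensions map is still shared across the points for a given value, which matches the behaviour of the code in this PR.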