From d16204bb785450600e9a00db05868e34aa2d9c78 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E3=81=82=E3=81=A7?=
Date: Wed, 9 Aug 2023 09:51:07 +0200
Subject: [PATCH] Rename metrics

---
 CHANGELOG.md                              |  7 ++-
 src/autometrics/constants.py              |  2 +-
 src/autometrics/test_caller.py            |  2 +-
 src/autometrics/test_decorator.py         | 64 ++++++++++++------------
 src/autometrics/tracker/opentelemetry.py  |  1 +
 src/autometrics/tracker/prometheus.py     |  1 +
 src/autometrics/tracker/test_format.py    | 25 +++++++++
 7 files changed, 67 insertions(+), 35 deletions(-)
 create mode 100644 src/autometrics/tracker/test_format.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0317105..fa5affe 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,7 +16,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
 ### Changed
 
--
+- Renamed the `function.calls.count` metric to `function.calls` (which is exported
+  to Prometheus as `function_calls_total`) to be in line with OpenTelemetry and
+  OpenMetrics naming conventions. **Dashboards and alerting rules must be updated.**
+- When the `function.calls.duration` histogram is exported to Prometheus, it now
+  includes the units (`function_calls_duration_seconds`) to be in line with
+  Prometheus/OpenMetrics naming conventions. **Dashboards and alerting rules must be updated.**
 
 ### Deprecated
 
diff --git a/src/autometrics/constants.py b/src/autometrics/constants.py
index bd1cc52..813965c 100644
--- a/src/autometrics/constants.py
+++ b/src/autometrics/constants.py
@@ -1,6 +1,6 @@
 """Constants used by autometrics"""
 
-COUNTER_NAME = "function.calls.count"
+COUNTER_NAME = "function.calls"
 HISTOGRAM_NAME = "function.calls.duration"
 CONCURRENCY_NAME = "function.calls.concurrent"
 # NOTE - The Rust implementation does not use `build.info`, instead opts for just `build_info`
diff --git a/src/autometrics/test_caller.py b/src/autometrics/test_caller.py
index b199e63..58e143f 100644
--- a/src/autometrics/test_caller.py
+++ b/src/autometrics/test_caller.py
@@ -38,6 +38,6 @@ def bar():
     assert blob is not None
     data = blob.decode("utf-8")
 
-    expected = """function_calls_count_total{caller="test_caller_detection..bar",function="test_caller_detection..foo",module="autometrics.test_caller",objective_name="",objective_percentile="",result="ok"} 1.0"""
+    expected = """function_calls_total{caller="test_caller_detection..bar",function="test_caller_detection..foo",module="autometrics.test_caller",objective_name="",objective_percentile="",result="ok"} 1.0"""
     assert "wrapper" not in data
     assert expected in data
diff --git a/src/autometrics/test_decorator.py b/src/autometrics/test_decorator.py
index 73a3419..c9f53de 100644
--- a/src/autometrics/test_decorator.py
+++ b/src/autometrics/test_decorator.py
@@ -66,17 +66,17 @@ def test_basic(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count = f"""function_calls_count_total{{caller="",function="basic_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0"""
+        total_count = f"""function_calls_total{{caller="",function="basic_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0"""
         assert total_count in data
 
         for latency in ObjectiveLatency:
-            query = f"""function_calls_duration_bucket{{function="basic_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+            query = f"""function_calls_duration_seconds_bucket{{function="basic_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
             assert query in data
 
-        duration_count = f"""function_calls_duration_count{{function="basic_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+        duration_count = f"""function_calls_duration_seconds_count{{function="basic_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
         assert duration_count in data
 
-        duration_sum = f"""function_calls_duration_sum{{function="basic_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+        duration_sum = f"""function_calls_duration_seconds_sum{{function="basic_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
         assert duration_sum in data
 
     @pytest.mark.asyncio
@@ -94,17 +94,17 @@ async def test_basic_async(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count = f"""function_calls_count_total{{caller="",function="basic_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0"""
+        total_count = f"""function_calls_total{{caller="",function="basic_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0"""
         assert total_count in data
 
         for latency in ObjectiveLatency:
-            query = f"""function_calls_duration_bucket{{function="basic_async_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+            query = f"""function_calls_duration_seconds_bucket{{function="basic_async_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
             assert query in data
 
-        duration_count = f"""function_calls_duration_count{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+        duration_count = f"""function_calls_duration_seconds_count{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
         assert duration_count in data
 
-        duration_sum = f"""function_calls_duration_sum{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+        duration_sum = f"""function_calls_duration_seconds_sum{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
         assert duration_sum in data
 
     def test_objectives(self):
@@ -130,19 +130,19 @@ def test_objectives(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count = f"""function_calls_count_total{{caller="",function="{function_name}",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0"""
+        total_count = f"""function_calls_total{{caller="",function="{function_name}",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0"""
         assert total_count in data
 
         # Check the latency buckets
         for objective in ObjectiveLatency:
             count = 0 if float(objective.value) <= sleep_duration else 1
-            query = f"""function_calls_duration_bucket{{function="{function_name}",le="{objective.value}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}"""
+            query = f"""function_calls_duration_seconds_bucket{{function="{function_name}",le="{objective.value}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}"""
             assert query in data
 
-        duration_count = f"""function_calls_duration_count{{function="{function_name}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}"""
+        duration_count = f"""function_calls_duration_seconds_count{{function="{function_name}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}"""
         assert duration_count in data
 
-        duration_sum = f"""function_calls_duration_sum{{function="{function_name}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}"""
+        duration_sum = f"""function_calls_duration_seconds_sum{{function="{function_name}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}"""
         assert duration_sum in data
 
     @pytest.mark.asyncio
@@ -170,19 +170,19 @@ async def test_objectives_async(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count = f"""function_calls_count_total{{caller="",function="basic_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0"""
+        total_count = f"""function_calls_total{{caller="",function="basic_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0"""
         assert total_count in data
 
         # Check the latency buckets
         for objective in ObjectiveLatency:
             count = 0 if float(objective.value) <= sleep_duration else 1
-            query = f"""function_calls_duration_bucket{{function="basic_async_function",le="{objective.value}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}"""
+            query = f"""function_calls_duration_seconds_bucket{{function="basic_async_function",le="{objective.value}",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}} {count}"""
             assert query in data
 
-        duration_count = f"""function_calls_duration_count{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}"""
+        duration_count = f"""function_calls_duration_seconds_count{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}"""
         assert duration_count in data
 
-        duration_sum = f"""function_calls_duration_sum{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}"""
+        duration_sum = f"""function_calls_duration_seconds_sum{{function="basic_async_function",module="autometrics.test_decorator",objective_latency_threshold="{latency[0].value}",objective_name="{objective_name}",objective_percentile="{latency[1].value}"}}"""
         assert duration_sum in data
 
     def test_exception(self):
@@ -199,17 +199,17 @@ def test_exception(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count = f"""function_calls_count_total{{caller="",function="error_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0"""
+        total_count = f"""function_calls_total{{caller="",function="error_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0"""
         assert total_count in data
 
         for latency in ObjectiveLatency:
-            query = f"""function_calls_duration_bucket{{function="error_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+            query = f"""function_calls_duration_seconds_bucket{{function="error_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
             assert query in data
 
-        duration_count = f"""function_calls_duration_count{{function="error_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+        duration_count = f"""function_calls_duration_seconds_count{{function="error_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
         assert duration_count in data
 
-        duration_sum = f"""function_calls_duration_sum{{function="error_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+        duration_sum = f"""function_calls_duration_seconds_sum{{function="error_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
         assert duration_sum in data
 
     @pytest.mark.asyncio
@@ -230,17 +230,17 @@ async def test_async_exception(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count = f"""function_calls_count_total{{caller="",function="error_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0"""
+        total_count = f"""function_calls_total{{caller="",function="error_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0"""
         assert total_count in data
 
         for latency in ObjectiveLatency:
-            query = f"""function_calls_duration_bucket{{function="error_async_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+            query = f"""function_calls_duration_seconds_bucket{{function="error_async_function",le="{latency.value}",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
             assert query in data
 
-        duration_count = f"""function_calls_duration_count{{function="error_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+        duration_count = f"""function_calls_duration_seconds_count{{function="error_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
         assert duration_count in data
 
-        duration_sum = f"""function_calls_duration_sum{{function="error_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
+        duration_sum = f"""function_calls_duration_seconds_sum{{function="error_async_function",module="autometrics.test_decorator",objective_latency_threshold="",objective_name="",objective_percentile=""}}"""
         assert duration_sum in data
 
     def test_initialize_counters_sync(self):
@@ -253,10 +253,10 @@ def test_initialize_counters_sync(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count_ok = f"""function_calls_count_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
+        total_count_ok = f"""function_calls_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
         assert total_count_ok in data
 
-        total_count_error = f"""function_calls_count_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
+        total_count_error = f"""function_calls_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
         assert total_count_error in data
 
     def test_initialize_counters_sync_with_objective(self):
@@ -273,10 +273,10 @@ def test_initialize_counters_sync_with_objective(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count_ok = f"""function_calls_count_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
+        total_count_ok = f"""function_calls_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
         assert total_count_ok in data
 
-        total_count_error = f"""function_calls_count_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
+        total_count_error = f"""function_calls_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
         assert total_count_error in data
 
     @pytest.mark.asyncio
@@ -290,10 +290,10 @@ async def test_initialize_counters_async(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count_ok = f"""function_calls_count_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
+        total_count_ok = f"""function_calls_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
        assert total_count_ok in data
 
-        total_count_error = f"""function_calls_count_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
+        total_count_error = f"""function_calls_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
         assert total_count_error in data
 
     @pytest.mark.asyncio
@@ -311,8 +311,8 @@ async def test_initialize_counters_async_with_objective(self):
         assert blob is not None
         data = blob.decode("utf-8")
 
-        total_count_ok = f"""function_calls_count_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
+        total_count_ok = f"""function_calls_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
         assert total_count_ok in data
 
-        total_count_error = f"""function_calls_count_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
+        total_count_error = f"""function_calls_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
         assert total_count_error in data
diff --git a/src/autometrics/tracker/opentelemetry.py b/src/autometrics/tracker/opentelemetry.py
index c180f4e..96e4031 100644
--- a/src/autometrics/tracker/opentelemetry.py
+++ b/src/autometrics/tracker/opentelemetry.py
@@ -64,6 +64,7 @@ def __init__(self):
         self.__histogram_instance = meter.create_histogram(
             name=HISTOGRAM_NAME,
             description=HISTOGRAM_DESCRIPTION,
+            unit="seconds",
         )
         self.__up_down_counter_build_info_instance = meter.create_up_down_counter(
             name=BUILD_INFO_NAME,
diff --git a/src/autometrics/tracker/prometheus.py b/src/autometrics/tracker/prometheus.py
index e5750a6..5499fef 100644
--- a/src/autometrics/tracker/prometheus.py
+++ b/src/autometrics/tracker/prometheus.py
@@ -50,6 +50,7 @@ class PrometheusTracker:
             OBJECTIVE_PERCENTILE_PROMETHEUS,
             OBJECTIVE_LATENCY_THRESHOLD_PROMETHEUS,
         ],
+        unit="seconds",
     )
     prom_gauge_build_info = Gauge(
         BUILD_INFO_NAME, BUILD_INFO_DESCRIPTION, [COMMIT_KEY, VERSION_KEY, BRANCH_KEY]
diff --git a/src/autometrics/tracker/test_format.py b/src/autometrics/tracker/test_format.py
new file mode 100644
index 0000000..2b676a6
--- /dev/null
+++ b/src/autometrics/tracker/test_format.py
@@ -0,0 +1,25 @@
+from prometheus_client.exposition import generate_latest
+import pytest
+
+from . import init_tracker, TrackerType
+from .opentelemetry import OpenTelemetryTracker
+from ..decorator import autometrics
+
+
+@pytest.mark.parametrize("tracker", TrackerType)
+def test_metrics_format(tracker):
+    """Test that the metrics are formatted correctly."""
+    init_tracker(tracker)
+
+    @autometrics
+    def test_function():
+        pass
+
+    test_function()
+
+    blob = generate_latest()
+    assert blob is not None
+    data = blob.decode("utf-8")
+
+    assert "function_calls_total{" in data
+    assert "function_calls_duration_seconds_bucket{" in data
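
A quick way to verify a service after upgrading is to render the Prometheus exposition text and check for the renamed series before repointing dashboards and alerting rules. The sketch below is illustrative and not part of this patch: `handle_request` is a hypothetical instrumented function, and the snippet assumes the package is imported as `autometrics` with the default Prometheus tracker active, reusing `generate_latest` from `prometheus_client` just as the tests above do.

# Illustrative check (not part of this patch): confirm the exporter now emits
# the renamed metrics. Assumes the default Prometheus tracker and a
# hypothetical instrumented function named `handle_request`.
from prometheus_client.exposition import generate_latest

from autometrics import autometrics


@autometrics
def handle_request():
    return "ok"


handle_request()

exposition = generate_latest().decode("utf-8")

# Old names that dashboards or alerting rules may still reference:
assert "function_calls_count_total{" not in exposition
assert "function_calls_duration_bucket{" not in exposition

# New names introduced by this change:
assert "function_calls_total{" in exposition
assert "function_calls_duration_seconds_bucket{" in exposition

In PromQL terms, a query such as sum(rate(function_calls_count_total[5m])) becomes sum(rate(function_calls_total[5m])), and histogram queries move from function_calls_duration_* to function_calls_duration_seconds_*.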