From f08c3ad585073857e2de29f4a61f0d3ef2049575 Mon Sep 17 00:00:00 2001 From: sroda Date: Mon, 6 Feb 2023 17:21:04 +0200 Subject: [PATCH 01/10] Add metrics instrumentation sqlalchemy --- .../instrumentation/sqlalchemy/__init__.py | 34 ++- .../instrumentation/sqlalchemy/engine.py | 105 +++++-- .../tests/test_sqlalchemy_metrics.py | 256 ++++++++++++++++++ 3 files changed, 362 insertions(+), 33 deletions(-) create mode 100644 instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py index b19de5ec96..94e60e5d1f 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py @@ -102,14 +102,18 @@ from sqlalchemy.engine.base import Engine from wrapt import wrap_function_wrapper as _w +from opentelemetry.metrics import Histogram, get_meter +from opentelemetry.trace import get_tracer +from opentelemetry.semconv.metrics import MetricInstruments + from opentelemetry.instrumentation.instrumentor import BaseInstrumentor from opentelemetry.instrumentation.sqlalchemy.engine import ( EngineTracer, - _get_tracer, _wrap_connect, _wrap_create_async_engine, _wrap_create_engine, ) +from opentelemetry.instrumentation.sqlalchemy.version import __version__ from opentelemetry.instrumentation.sqlalchemy.package import _instruments from opentelemetry.instrumentation.utils import unwrap @@ -136,32 +140,47 @@ def _instrument(self, **kwargs): An instrumented engine if passed in as an argument or list of instrumented engines, None otherwise. 
""" tracer_provider = kwargs.get("tracer_provider") + tracer = get_tracer(__name__, __version__, tracer_provider) + + meter_provider = kwargs.get("meter_provider") + meter = get_meter(__name__, __version__, meter_provider) + + connections_usage = meter.create_up_down_counter( + name=MetricInstruments.DB_CLIENT_CONNECTIONS_USAGE, + unit="connections", + description="The number of connections that are currently in state described by the state attribute.", + ) + enable_commenter = kwargs.get("enable_commenter", False) + _w( "sqlalchemy", "create_engine", - _wrap_create_engine(tracer_provider, enable_commenter), + _wrap_create_engine(tracer, connections_usage, enable_commenter), ) _w( "sqlalchemy.engine", "create_engine", - _wrap_create_engine(tracer_provider, enable_commenter), + _wrap_create_engine(tracer, connections_usage, enable_commenter), ) _w( "sqlalchemy.engine.base", "Engine.connect", - _wrap_connect(tracer_provider), + _wrap_connect(tracer, connections_usage), ) if parse_version(sqlalchemy.__version__).release >= (1, 4): _w( "sqlalchemy.ext.asyncio", "create_async_engine", - _wrap_create_async_engine(tracer_provider, enable_commenter), + _wrap_create_async_engine( + tracer, connections_usage, enable_commenter + ), ) if kwargs.get("engine") is not None: return EngineTracer( - _get_tracer(tracer_provider), + tracer, kwargs.get("engine"), + connections_usage, kwargs.get("enable_commenter", False), kwargs.get("commenter_options", {}), ) @@ -170,8 +189,9 @@ def _instrument(self, **kwargs): ): return [ EngineTracer( - _get_tracer(tracer_provider), + tracer, engine, + connections_usage, kwargs.get("enable_commenter", False), kwargs.get("commenter_options", {}), ) diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py index f95751d9c6..4613bcd93d 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py @@ -20,9 +20,6 @@ ) from opentelemetry import trace -from opentelemetry.instrumentation.sqlalchemy.package import ( - _instrumenting_module_name, -) from opentelemetry.instrumentation.sqlalchemy.version import __version__ from opentelemetry.instrumentation.sqlcommenter_utils import _add_sql_comment from opentelemetry.instrumentation.utils import _get_opentelemetry_values @@ -44,15 +41,9 @@ def _normalize_vendor(vendor): return vendor -def _get_tracer(tracer_provider=None): - return trace.get_tracer( - _instrumenting_module_name, - __version__, - tracer_provider=tracer_provider, - ) - - -def _wrap_create_async_engine(tracer_provider=None, enable_commenter=False): +def _wrap_create_async_engine( + tracer, connections_usage, enable_commenter=False +): # pylint: disable=unused-argument def _wrap_create_async_engine_internal(func, module, args, kwargs): """Trace the SQLAlchemy engine, creating an `EngineTracer` @@ -60,32 +51,26 @@ def _wrap_create_async_engine_internal(func, module, args, kwargs): """ engine = func(*args, **kwargs) EngineTracer( - _get_tracer(tracer_provider), engine.sync_engine, enable_commenter + tracer, engine.sync_engine, connections_usage, enable_commenter ) return engine return _wrap_create_async_engine_internal -def _wrap_create_engine(tracer_provider=None, enable_commenter=False): - # pylint: 
disable=unused-argument - def _wrap_create_engine_internal(func, module, args, kwargs): +def _wrap_create_engine(tracer, connections_usage, enable_commenter=False): + def _wrap_create_engine_internal(func, _module, args, kwargs): """Trace the SQLAlchemy engine, creating an `EngineTracer` object that will listen to SQLAlchemy events. """ engine = func(*args, **kwargs) - EngineTracer(_get_tracer(tracer_provider), engine, enable_commenter) + EngineTracer(tracer, engine, connections_usage, enable_commenter) return engine return _wrap_create_engine_internal -def _wrap_connect(tracer_provider=None): - tracer = trace.get_tracer( - _instrumenting_module_name, - __version__, - tracer_provider=tracer_provider, - ) +def _wrap_connect(tracer, connections_usage): # pylint: disable=unused-argument def _wrap_connect_internal(func, module, args, kwargs): @@ -101,10 +86,16 @@ class EngineTracer: _remove_event_listener_params = [] def __init__( - self, tracer, engine, enable_commenter=False, commenter_options=None + self, + tracer, + engine, + connections_usage, + enable_commenter=False, + commenter_options=None, ): self.tracer = tracer self.engine = engine + self.connections_usage = connections_usage self.vendor = _normalize_vendor(engine.name) self.enable_commenter = enable_commenter self.commenter_options = commenter_options if commenter_options else {} @@ -117,6 +108,69 @@ def __init__( engine, "after_cursor_execute", _after_cur_exec ) self._register_event_listener(engine, "handle_error", _handle_error) + self._register_event_listener(engine, "connect", self._pool_connect) + self._register_event_listener(engine, "close", self._pool_close) + self._register_event_listener(engine, "checkin", self._pool_checkin) + self._register_event_listener(engine, "checkout", self._pool_checkout) + + def _pool_connect(self, _dbapi_connection, _connection_record): + self.connections_usage.add( + 1, + attributes={ + "pool.name": self._get_pool_name(), + "state": "idle", + }, + ) + + def _pool_close(self, _dbapi_connection, _connection_record): + self.connections_usage.add( + -1, + attributes={ + "pool.name": self._get_pool_name(), + "state": "idle", + }, + ) + + # Called when a connection returns to the pool. + def _pool_checkin(self, _dbapi_connection, _connection_record): + self.connections_usage.add( + -1, + attributes={ + "pool.name": self._get_pool_name(), + "state": "used", + }, + ) + + self.connections_usage.add( + 1, + attributes={ + "pool.name": self._get_pool_name(), + "state": "idle", + }, + ) + + # Called when a connection is retrieved from the Pool. 
+ def _pool_checkout( + self, _dbapi_connection, _connection_record, _connection_proxy + ): + self.connections_usage.add( + -1, + attributes={ + "pool.name": self._get_pool_name(), + "state": "idle", + }, + ) + + self.connections_usage.add( + 1, + attributes={ + "pool.name": self._get_pool_name(), + "state": "used", + }, + ) + + def _get_pool_name(self): + return self.engine.pool.logging_name @classmethod def _register_event_listener(cls, target, identifier, func, *args, **kw): @@ -147,9 +201,8 @@ def _operation_name(self, db_name, statement): return self.vendor return " ".join(parts) - # pylint: disable=unused-argument def _before_cur_exec( - self, conn, cursor, statement, params, context, executemany + self, conn, cursor, statement, params, context, _executemany ): attrs, found = _get_attributes_from_url(conn.engine.url) if not found: diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py new file mode 100644 index 0000000000..5264135254 --- /dev/null +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py @@ -0,0 +1,256 @@ +# Copyright The OpenTelemetry Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from sqlalchemy import create_engine +from sqlalchemy.pool import QueuePool + +from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor +from opentelemetry.test.test_base import TestBase +from typing import Optional, Sequence +from opentelemetry.sdk.metrics._internal.point import Metric +from opentelemetry.sdk.metrics.export import ( + HistogramDataPoint, + NumberDataPoint, + DataPointT, +) + + +class TestSqlalchemyMetricsInstrumentation(TestBase): + def tearDown(self): + super().tearDown() + SQLAlchemyInstrumentor().uninstrument() + + def get_sorted_metrics(self): + resource_metrics = ( + self.memory_metrics_reader.get_metrics_data().resource_metrics + ) + + all_metrics = [] + for metrics in resource_metrics: + for scope_metrics in metrics.scope_metrics: + all_metrics.extend(scope_metrics.metrics) + + return self.sorted_metrics(all_metrics) + + @staticmethod + def sorted_metrics(metrics): + """ + Sorts metrics by metric name. 
+ """ + return sorted( + metrics, + key=lambda m: m.name, + ) + + def assert_metric_expected( + self, + metric: Metric, + expected_data_points: Sequence[DataPointT], + est_value_delta: Optional[float] = 0, + ): + self.assertEqual( + len(expected_data_points), len(metric.data.data_points) + ) + for expected_data_point in expected_data_points: + self.assert_data_point_expected( + expected_data_point, metric.data.data_points, est_value_delta + ) + + @staticmethod + def is_data_points_equal( + expected_data_point: DataPointT, + data_point: DataPointT, + est_value_delta: Optional[float] = 0, + ): + if type(expected_data_point) != type(data_point) or not isinstance( + expected_data_point, (HistogramDataPoint, NumberDataPoint) + ): + return False + + values_diff = None + if isinstance(data_point, HistogramDataPoint): + values_diff = abs(expected_data_point.sum - data_point.sum) + elif isinstance(data_point, NumberDataPoint): + values_diff = abs(expected_data_point.value - data_point.value) + + return ( + values_diff <= est_value_delta + and expected_data_point.attributes == dict(data_point.attributes) + ) + + def assert_data_point_expected( + self, + expected_data_point: DataPointT, + data_points: Sequence[DataPointT], + est_value_delta: Optional[float] = 0, + ): + is_data_point_exist = False + for data_point in data_points: + if self.is_data_points_equal( + expected_data_point, data_point, est_value_delta + ): + is_data_point_exist = True + break + + self.assertTrue( + is_data_point_exist, + msg="data point {} does not exist".format( + expected_data_point.to_json() + ), + ) + + @staticmethod + def create_number_data_point(value, attributes): + return NumberDataPoint( + value=value, + attributes=attributes, + start_time_unix_nano=0, + time_unix_nano=0, + ) + + def assert_metrics_used_idle_as_expected(self, pool_name, idle, used): + metrics = self.get_sorted_metrics() + self.assertEqual(len(metrics), 1) + self.assert_metric_expected( + metrics[0], + [ + self.create_number_data_point( + value=idle, + attributes={"pool.name": pool_name, "state": "idle"}, + ), + self.create_number_data_point( + value=used, + attributes={"pool.name": pool_name, "state": "used"}, + ), + ], + ) + + def test_metrics_one_connection(self): + pool_name = "pool_test_name" + self.engine = create_engine( + "sqlite:///:memory:", + pool_size=5, + poolclass=QueuePool, + pool_logging_name=pool_name, + ) + + SQLAlchemyInstrumentor().instrument( + engine=self.engine, + tracer_provider=self.tracer_provider, + ) + metrics = self.get_sorted_metrics() + self.assertEqual(len(metrics), 0) + + with self.engine.connect(): + self.assert_metrics_used_idle_as_expected( + pool_name=pool_name, idle=0, used=1 + ) + + # After the connection is closed + self.assert_metrics_used_idle_as_expected( + pool_name=pool_name, idle=1, used=0 + ) + + def test_metrics_without_pool_name(self): + pool_name = "" + self.engine = create_engine( + "sqlite:///:memory:", + pool_size=5, + poolclass=QueuePool, + ) + + SQLAlchemyInstrumentor().instrument( + engine=self.engine, + tracer_provider=self.tracer_provider, + ) + metrics = self.get_sorted_metrics() + self.assertEqual(len(metrics), 0) + + with self.engine.connect(): + self.assert_metrics_used_idle_as_expected( + pool_name=pool_name, idle=0, used=1 + ) + + # After the connection is closed + self.assert_metrics_used_idle_as_expected( + pool_name=pool_name, idle=1, used=0 + ) + + def test_metrics_two_connections(self): + pool_name = "pool_test_name" + self.engine = create_engine( + "sqlite:///:memory:", + 
pool_size=5, + poolclass=QueuePool, + pool_logging_name=pool_name, + ) + + SQLAlchemyInstrumentor().instrument( + engine=self.engine, + tracer_provider=self.tracer_provider, + ) + metrics = self.get_sorted_metrics() + self.assertEqual(len(metrics), 0) + + with self.engine.connect(): + with self.engine.connect(): + self.assert_metrics_used_idle_as_expected( + pool_name, idle=0, used=2 + ) + + # After the first connection is closed + self.assert_metrics_used_idle_as_expected( + pool_name, idle=1, used=1 + ) + + # After the two connections are closed + self.assert_metrics_used_idle_as_expected(pool_name, idle=2, used=0) + + def test_metrics_connections(self): + pool_name = "pool_test_name" + self.engine = create_engine( + "sqlite:///:memory:", + pool_size=5, + poolclass=QueuePool, + pool_logging_name=pool_name, + ) + + SQLAlchemyInstrumentor().instrument( + engine=self.engine, + tracer_provider=self.tracer_provider, + ) + metrics = self.get_sorted_metrics() + self.assertEqual(len(metrics), 0) + + with self.engine.connect(): + with self.engine.connect(): + self.assert_metrics_used_idle_as_expected( + pool_name=pool_name, idle=0, used=2 + ) + + # After the first connection is closed + self.assert_metrics_used_idle_as_expected( + pool_name=pool_name, idle=1, used=1 + ) + + # Resume from idle to used + with self.engine.connect(): + self.assert_metrics_used_idle_as_expected( + pool_name=pool_name, idle=0, used=2 + ) + + # After the two connections are closed + self.assert_metrics_used_idle_as_expected( + pool_name=pool_name, idle=2, used=0 + ) From 28c8c2e20641e2caa83c8df20532d51adae22953 Mon Sep 17 00:00:00 2001 From: sroda Date: Mon, 6 Feb 2023 17:31:14 +0200 Subject: [PATCH 02/10] refactored the connection usage --- .../instrumentation/sqlalchemy/engine.py | 56 ++++++------------- 1 file changed, 18 insertions(+), 38 deletions(-) diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py index 4613bcd93d..17bac74130 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py @@ -113,64 +113,44 @@ def __init__( self._register_event_listener(engine, "checkin", self._pool_checkin) self._register_event_listener(engine, "checkout", self._pool_checkout) - def _pool_connect(self, _dbapi_connection, _connection_record): - self.connections_usage.add( - 1, - attributes={ - "pool.name": self._get_pool_name(), - "state": "idle", - }, - ) + def _get_pool_name(self): + return self.engine.pool.logging_name or "" - def _pool_close(self, _dbapi_connection, _connection_record): + def _add_idle_to_connection_usage(self, value): self.connections_usage.add( - -1, + value, attributes={ "pool.name": self._get_pool_name(), "state": "idle", }, ) - # Called when a connection returns to the pool. 
- def _pool_checkin(self, _dbapi_connection, _connection_record): + def _add_used_to_connection_usage(self, value): self.connections_usage.add( - -1, + value, attributes={ "pool.name": self._get_pool_name(), "state": "used", }, ) - self.connections_usage.add( - 1, - attributes={ - "pool.name": self._get_pool_name(), - "state": "idle", - }, - ) + def _pool_connect(self, _dbapi_connection, _connection_record): + self._add_idle_to_connection_usage(1) + + def _pool_close(self, _dbapi_connection, _connection_record): + self._add_idle_to_connection_usage(-1) + + # Called when a connection returns to the pool. + def _pool_checkin(self, _dbapi_connection, _connection_record): + self._add_used_to_connection_usage(-1) + self._add_idle_to_connection_usage(1) # Called when a connection is retrieved from the Pool. def _pool_checkout( self, _dbapi_connection, _connection_record, _connection_proxy ): - self.connections_usage.add( - -1, - attributes={ - "pool.name": self._get_pool_name(), - "state": "idle", - }, - ) - - self.connections_usage.add( - 1, - attributes={ - "pool.name": self._get_pool_name(), - "state": "used", - }, - ) - - def _get_pool_name(self): - return self.engine.pool.logging_name + self._add_idle_to_connection_usage(-1) + self._add_used_to_connection_usage(1) @classmethod def _register_event_listener(cls, target, identifier, func, *args, **kw): From ae568304a82db39e1ef692b77b484b03c25208ad Mon Sep 17 00:00:00 2001 From: sroda Date: Mon, 6 Feb 2023 18:40:53 +0200 Subject: [PATCH 03/10] Add changelog entry and supports_metrics --- CHANGELOG.md | 3 +- instrumentation/README.md | 2 +- .../instrumentation/sqlalchemy/__init__.py | 4 +- .../instrumentation/sqlalchemy/engine.py | 3 +- .../instrumentation/sqlalchemy/package.py | 2 + .../tests/test_sqlalchemy_metrics.py | 44 ++++++++++--------- 6 files changed, 31 insertions(+), 27 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ba513aebfa..b9df44a816 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased ### Added - +- Add metrics instrumentation for sqlalchemy + ([#1645](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1645)) - `opentelemetry-instrumentation-redis` Add `sanitize_query` config option to allow query sanitization. ([#1572](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1572)) - `opentelemetry-instrumentation-celery` Record exceptions as events on the span. 
([#1573](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1573)) diff --git a/instrumentation/README.md b/instrumentation/README.md index b1482a0227..b28f00c80c 100644 --- a/instrumentation/README.md +++ b/instrumentation/README.md @@ -34,7 +34,7 @@ | [opentelemetry-instrumentation-remoulade](./opentelemetry-instrumentation-remoulade) | remoulade >= 0.50 | No | [opentelemetry-instrumentation-requests](./opentelemetry-instrumentation-requests) | requests ~= 2.0 | Yes | [opentelemetry-instrumentation-sklearn](./opentelemetry-instrumentation-sklearn) | scikit-learn ~= 0.24.0 | No -| [opentelemetry-instrumentation-sqlalchemy](./opentelemetry-instrumentation-sqlalchemy) | sqlalchemy | No +| [opentelemetry-instrumentation-sqlalchemy](./opentelemetry-instrumentation-sqlalchemy) | sqlalchemy | Yes | [opentelemetry-instrumentation-sqlite3](./opentelemetry-instrumentation-sqlite3) | sqlite3 | No | [opentelemetry-instrumentation-starlette](./opentelemetry-instrumentation-starlette) | starlette ~= 0.13.0 | Yes | [opentelemetry-instrumentation-system-metrics](./opentelemetry-instrumentation-system-metrics) | psutil >= 5 | No diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py index 94e60e5d1f..ed61a740ba 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py @@ -146,7 +146,7 @@ def _instrument(self, **kwargs): meter = get_meter(__name__, __version__, meter_provider) connections_usage = meter.create_up_down_counter( - name=MetricInstruments.DB_CLIENT_CONNECTIONS_USAGE, + name="db.client.connections.usage", unit="connections", description="The number of connections that are currently in state described by the state attribute.", ) @@ -166,7 +166,7 @@ def _instrument(self, **kwargs): _w( "sqlalchemy.engine.base", "Engine.connect", - _wrap_connect(tracer, connections_usage), + _wrap_connect(tracer), ) if parse_version(sqlalchemy.__version__).release >= (1, 4): _w( diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py index 17bac74130..33715c11f0 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/engine.py @@ -70,8 +70,7 @@ def _wrap_create_engine_internal(func, _module, args, kwargs): return _wrap_create_engine_internal -def _wrap_connect(tracer, connections_usage): - +def _wrap_connect(tracer): # pylint: disable=unused-argument def _wrap_connect_internal(func, module, args, kwargs): with tracer.start_as_current_span( diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/package.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/package.py index f1f833287d..ead31723d1 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/package.py +++ 
b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/package.py @@ -15,3 +15,5 @@ _instrumenting_module_name = "opentelemetry.instrumentation.sqlalchemy" _instruments = ("sqlalchemy",) + +_supports_metrics = True diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py index 5264135254..ec9013a4be 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from sqlalchemy import create_engine +import sqlalchemy from sqlalchemy.pool import QueuePool from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor @@ -27,6 +27,12 @@ class TestSqlalchemyMetricsInstrumentation(TestBase): + def setUp(self): + super().setUp() + SQLAlchemyInstrumentor().instrument( + tracer_provider=self.tracer_provider, + ) + def tearDown(self): super().tearDown() SQLAlchemyInstrumentor().uninstrument() @@ -138,17 +144,13 @@ def assert_metrics_used_idle_as_expected(self, pool_name, idle, used): def test_metrics_one_connection(self): pool_name = "pool_test_name" - self.engine = create_engine( + self.engine = sqlalchemy.create_engine( "sqlite:///:memory:", pool_size=5, poolclass=QueuePool, pool_logging_name=pool_name, ) - SQLAlchemyInstrumentor().instrument( - engine=self.engine, - tracer_provider=self.tracer_provider, - ) metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) @@ -164,16 +166,12 @@ def test_metrics_one_connection(self): def test_metrics_without_pool_name(self): pool_name = "" - self.engine = create_engine( + self.engine = sqlalchemy.create_engine( "sqlite:///:memory:", pool_size=5, poolclass=QueuePool, ) - SQLAlchemyInstrumentor().instrument( - engine=self.engine, - tracer_provider=self.tracer_provider, - ) metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) @@ -189,17 +187,13 @@ def test_metrics_without_pool_name(self): def test_metrics_two_connections(self): pool_name = "pool_test_name" - self.engine = create_engine( + self.engine = sqlalchemy.create_engine( "sqlite:///:memory:", pool_size=5, poolclass=QueuePool, pool_logging_name=pool_name, ) - SQLAlchemyInstrumentor().instrument( - engine=self.engine, - tracer_provider=self.tracer_provider, - ) metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) @@ -219,17 +213,13 @@ def test_metrics_two_connections(self): def test_metrics_connections(self): pool_name = "pool_test_name" - self.engine = create_engine( + self.engine = sqlalchemy.create_engine( "sqlite:///:memory:", pool_size=5, poolclass=QueuePool, pool_logging_name=pool_name, ) - SQLAlchemyInstrumentor().instrument( - engine=self.engine, - tracer_provider=self.tracer_provider, - ) metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) @@ -254,3 +244,15 @@ def test_metrics_connections(self): self.assert_metrics_used_idle_as_expected( pool_name=pool_name, idle=2, used=0 ) + + def test_metric_uninstrument(self): + SQLAlchemyInstrumentor().uninstrument() + self.engine = sqlalchemy.create_engine( + "sqlite:///:memory:", + poolclass=QueuePool, + ) + + self.engine.connect() + + metrics = self.get_sorted_metrics() + self.assertEqual(len(metrics), 0) From 
eb5e1dc7389a5d767a1545954538fd456d85010b Mon Sep 17 00:00:00 2001 From: sroda Date: Mon, 6 Feb 2023 20:07:46 +0200 Subject: [PATCH 04/10] Fix lint --- .../instrumentation/sqlalchemy/__init__.py | 8 +-- .../tests/test_sqlalchemy_metrics.py | 66 +++++++++---------- 2 files changed, 34 insertions(+), 40 deletions(-) diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py index ed61a740ba..c6322b566c 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py @@ -102,10 +102,6 @@ from sqlalchemy.engine.base import Engine from wrapt import wrap_function_wrapper as _w -from opentelemetry.metrics import Histogram, get_meter -from opentelemetry.trace import get_tracer -from opentelemetry.semconv.metrics import MetricInstruments - from opentelemetry.instrumentation.instrumentor import BaseInstrumentor from opentelemetry.instrumentation.sqlalchemy.engine import ( EngineTracer, @@ -113,9 +109,11 @@ _wrap_create_async_engine, _wrap_create_engine, ) -from opentelemetry.instrumentation.sqlalchemy.version import __version__ from opentelemetry.instrumentation.sqlalchemy.package import _instruments +from opentelemetry.instrumentation.sqlalchemy.version import __version__ from opentelemetry.instrumentation.utils import unwrap +from opentelemetry.metrics import get_meter +from opentelemetry.trace import get_tracer class SQLAlchemyInstrumentor(BaseInstrumentor): diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py index ec9013a4be..e9dfd527f1 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py @@ -12,18 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Optional, Sequence + import sqlalchemy from sqlalchemy.pool import QueuePool from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor -from opentelemetry.test.test_base import TestBase -from typing import Optional, Sequence from opentelemetry.sdk.metrics._internal.point import Metric from opentelemetry.sdk.metrics.export import ( + DataPointT, HistogramDataPoint, NumberDataPoint, - DataPointT, ) +from opentelemetry.test.test_base import TestBase class TestSqlalchemyMetricsInstrumentation(TestBase): @@ -73,6 +74,7 @@ def assert_metric_expected( expected_data_point, metric.data.data_points, est_value_delta ) + # pylint: disable=unidiomatic-typecheck @staticmethod def is_data_points_equal( expected_data_point: DataPointT, @@ -111,9 +113,7 @@ def assert_data_point_expected( self.assertTrue( is_data_point_exist, - msg="data point {} does not exist".format( - expected_data_point.to_json() - ), + msg=f"Data point {expected_data_point} does not exist", ) @staticmethod @@ -125,7 +125,7 @@ def create_number_data_point(value, attributes): time_unix_nano=0, ) - def assert_metrics_used_idle_as_expected(self, pool_name, idle, used): + def assert_pool_idle_used_expected(self, pool_name, idle, used): metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 1) self.assert_metric_expected( @@ -144,7 +144,7 @@ def assert_metrics_used_idle_as_expected(self, pool_name, idle, used): def test_metrics_one_connection(self): pool_name = "pool_test_name" - self.engine = sqlalchemy.create_engine( + engine = sqlalchemy.create_engine( "sqlite:///:memory:", pool_size=5, poolclass=QueuePool, @@ -154,19 +154,19 @@ def test_metrics_one_connection(self): metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) - with self.engine.connect(): - self.assert_metrics_used_idle_as_expected( + with engine.connect(): + self.assert_pool_idle_used_expected( pool_name=pool_name, idle=0, used=1 ) # After the connection is closed - self.assert_metrics_used_idle_as_expected( + self.assert_pool_idle_used_expected( pool_name=pool_name, idle=1, used=0 ) def test_metrics_without_pool_name(self): pool_name = "" - self.engine = sqlalchemy.create_engine( + engine = sqlalchemy.create_engine( "sqlite:///:memory:", pool_size=5, poolclass=QueuePool, @@ -175,19 +175,19 @@ def test_metrics_without_pool_name(self): metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) - with self.engine.connect(): - self.assert_metrics_used_idle_as_expected( + with engine.connect(): + self.assert_pool_idle_used_expected( pool_name=pool_name, idle=0, used=1 ) # After the connection is closed - self.assert_metrics_used_idle_as_expected( + self.assert_pool_idle_used_expected( pool_name=pool_name, idle=1, used=0 ) def test_metrics_two_connections(self): pool_name = "pool_test_name" - self.engine = sqlalchemy.create_engine( + engine = sqlalchemy.create_engine( "sqlite:///:memory:", pool_size=5, poolclass=QueuePool, @@ -197,23 +197,19 @@ def test_metrics_two_connections(self): metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) - with self.engine.connect(): - with self.engine.connect(): - self.assert_metrics_used_idle_as_expected( - pool_name, idle=0, used=2 - ) + with engine.connect(): + with engine.connect(): + self.assert_pool_idle_used_expected(pool_name, idle=0, used=2) # After the first connection is closed - self.assert_metrics_used_idle_as_expected( - pool_name, idle=1, used=1 - ) + self.assert_pool_idle_used_expected(pool_name, idle=1, used=1) # After the two connections 
are closed - self.assert_metrics_used_idle_as_expected(pool_name, idle=2, used=0) + self.assert_pool_idle_used_expected(pool_name, idle=2, used=0) def test_metrics_connections(self): pool_name = "pool_test_name" - self.engine = sqlalchemy.create_engine( + engine = sqlalchemy.create_engine( "sqlite:///:memory:", pool_size=5, poolclass=QueuePool, @@ -223,36 +219,36 @@ def test_metrics_connections(self): metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) - with self.engine.connect(): - with self.engine.connect(): - self.assert_metrics_used_idle_as_expected( + with engine.connect(): + with engine.connect(): + self.assert_pool_idle_used_expected( pool_name=pool_name, idle=0, used=2 ) # After the first connection is closed - self.assert_metrics_used_idle_as_expected( + self.assert_pool_idle_used_expected( pool_name=pool_name, idle=1, used=1 ) # Resume from idle to used - with self.engine.connect(): - self.assert_metrics_used_idle_as_expected( + with engine.connect(): + self.assert_pool_idle_used_expected( pool_name=pool_name, idle=0, used=2 ) # After the two connections are closed - self.assert_metrics_used_idle_as_expected( + self.assert_pool_idle_used_expected( pool_name=pool_name, idle=2, used=0 ) def test_metric_uninstrument(self): SQLAlchemyInstrumentor().uninstrument() - self.engine = sqlalchemy.create_engine( + engine = sqlalchemy.create_engine( "sqlite:///:memory:", poolclass=QueuePool, ) - self.engine.connect() + engine.connect() metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 0) From b22f06aa7f1bbd9c254f201e7501109eb1509f4c Mon Sep 17 00:00:00 2001 From: sroda Date: Tue, 7 Feb 2023 11:17:05 +0200 Subject: [PATCH 05/10] Using semantic convention --- .github/workflows/test.yml | 2 +- .../src/opentelemetry/instrumentation/sqlalchemy/__init__.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 626dc66dbb..1d17673e6b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,7 +6,7 @@ on: - 'release/*' pull_request: env: - CORE_REPO_SHA: 042d7a7921e25936decd95addf4fb1d30afb88e2 + CORE_REPO_SHA: f5fb6b1353929cf8039b1d38f97450866357d901 jobs: build: diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py index c6322b566c..77db23b417 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/__init__.py @@ -113,6 +113,7 @@ from opentelemetry.instrumentation.sqlalchemy.version import __version__ from opentelemetry.instrumentation.utils import unwrap from opentelemetry.metrics import get_meter +from opentelemetry.semconv.metrics import MetricInstruments from opentelemetry.trace import get_tracer @@ -144,7 +145,7 @@ def _instrument(self, **kwargs): meter = get_meter(__name__, __version__, meter_provider) connections_usage = meter.create_up_down_counter( - name="db.client.connections.usage", + name=MetricInstruments.DB_CLIENT_CONNECTIONS_USAGE, unit="connections", description="The number of connections that are currently in state described by the state attribute.", ) From 176e8e6e9e4aad23b7c450d2e96a8e6b6e017bf5 Mon Sep 17 00:00:00 2001 From: Shalev Roda <65566801+shalevr@users.noreply.github.com> Date: 
Fri, 17 Feb 2023 22:39:37 +0200 Subject: [PATCH 06/10] Update test.yml merge with main --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1d17673e6b..08cabf2de5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,7 +6,7 @@ on: - 'release/*' pull_request: env: - CORE_REPO_SHA: f5fb6b1353929cf8039b1d38f97450866357d901 + CORE_REPO_SHA: d0bb12b34b0c487198c935001636b6163485a50f jobs: build: From 3104b4b3e1c0c1a7680d3847ec601d91f9727da9 Mon Sep 17 00:00:00 2001 From: sroda Date: Sat, 18 Feb 2023 11:21:11 +0200 Subject: [PATCH 07/10] Fix Changelog --- CHANGELOG.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36830ca439..9ec1abf77f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,12 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased +- Add metrics instrumentation for sqlalchemy + ([#1645](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1645)) + ## Version 1.16.0/0.37b0 (2023-02-17) ### Added -- Add metrics instrumentation for sqlalchemy - ([#1645](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1645)) - Support `aio_pika` 9.x (([#1670](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1670]) - `opentelemetry-instrumentation-redis` Add `sanitize_query` config option to allow query sanitization. ([#1572](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1572)) - `opentelemetry-instrumentation-elasticsearch` Add optional db.statement query sanitization. From 58532fc481cc0eefd09af99560115fa7f3eb27e7 Mon Sep 17 00:00:00 2001 From: sroda Date: Wed, 22 Feb 2023 00:09:18 +0200 Subject: [PATCH 08/10] Remove unused code --- .../src/opentelemetry/instrumentation/sqlalchemy/package.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/package.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/package.py index ead31723d1..e3029f57b6 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/package.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/src/opentelemetry/instrumentation/sqlalchemy/package.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-_instrumenting_module_name = "opentelemetry.instrumentation.sqlalchemy" - _instruments = ("sqlalchemy",) _supports_metrics = True From dece77d0adf43cade48406670e6ce2727e1302b1 Mon Sep 17 00:00:00 2001 From: sroda Date: Thu, 23 Feb 2023 11:40:11 +0200 Subject: [PATCH 09/10] work with the metric basic test function from test_base.py --- .github/workflows/test.yml | 2 +- .../tests/test_sqlalchemy_metrics.py | 95 ------------------- 2 files changed, 1 insertion(+), 96 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 08cabf2de5..a55453d65b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,7 +6,7 @@ on: - 'release/*' pull_request: env: - CORE_REPO_SHA: d0bb12b34b0c487198c935001636b6163485a50f + CORE_REPO_SHA: 2d1f0b9f5fce62549d1338882f37b91b95881c75 jobs: build: diff --git a/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py b/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py index e9dfd527f1..39e45945f7 100644 --- a/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py +++ b/instrumentation/opentelemetry-instrumentation-sqlalchemy/tests/test_sqlalchemy_metrics.py @@ -12,18 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional, Sequence - import sqlalchemy from sqlalchemy.pool import QueuePool from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor -from opentelemetry.sdk.metrics._internal.point import Metric -from opentelemetry.sdk.metrics.export import ( - DataPointT, - HistogramDataPoint, - NumberDataPoint, -) from opentelemetry.test.test_base import TestBase @@ -38,93 +30,6 @@ def tearDown(self): super().tearDown() SQLAlchemyInstrumentor().uninstrument() - def get_sorted_metrics(self): - resource_metrics = ( - self.memory_metrics_reader.get_metrics_data().resource_metrics - ) - - all_metrics = [] - for metrics in resource_metrics: - for scope_metrics in metrics.scope_metrics: - all_metrics.extend(scope_metrics.metrics) - - return self.sorted_metrics(all_metrics) - - @staticmethod - def sorted_metrics(metrics): - """ - Sorts metrics by metric name. 
- """ - return sorted( - metrics, - key=lambda m: m.name, - ) - - def assert_metric_expected( - self, - metric: Metric, - expected_data_points: Sequence[DataPointT], - est_value_delta: Optional[float] = 0, - ): - self.assertEqual( - len(expected_data_points), len(metric.data.data_points) - ) - for expected_data_point in expected_data_points: - self.assert_data_point_expected( - expected_data_point, metric.data.data_points, est_value_delta - ) - - # pylint: disable=unidiomatic-typecheck - @staticmethod - def is_data_points_equal( - expected_data_point: DataPointT, - data_point: DataPointT, - est_value_delta: Optional[float] = 0, - ): - if type(expected_data_point) != type(data_point) or not isinstance( - expected_data_point, (HistogramDataPoint, NumberDataPoint) - ): - return False - - values_diff = None - if isinstance(data_point, HistogramDataPoint): - values_diff = abs(expected_data_point.sum - data_point.sum) - elif isinstance(data_point, NumberDataPoint): - values_diff = abs(expected_data_point.value - data_point.value) - - return ( - values_diff <= est_value_delta - and expected_data_point.attributes == dict(data_point.attributes) - ) - - def assert_data_point_expected( - self, - expected_data_point: DataPointT, - data_points: Sequence[DataPointT], - est_value_delta: Optional[float] = 0, - ): - is_data_point_exist = False - for data_point in data_points: - if self.is_data_points_equal( - expected_data_point, data_point, est_value_delta - ): - is_data_point_exist = True - break - - self.assertTrue( - is_data_point_exist, - msg=f"Data point {expected_data_point} does not exist", - ) - - @staticmethod - def create_number_data_point(value, attributes): - return NumberDataPoint( - value=value, - attributes=attributes, - start_time_unix_nano=0, - time_unix_nano=0, - ) - def assert_pool_idle_used_expected(self, pool_name, idle, used): metrics = self.get_sorted_metrics() self.assertEqual(len(metrics), 1) From 71b234e5e1f8e795cb1ab7be6e7f0e2f723b0c1f Mon Sep 17 00:00:00 2001 From: sroda Date: Sat, 25 Feb 2023 22:36:07 +0200 Subject: [PATCH 10/10] Fix merge issue --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f271801d7b..0537ee147e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add metrics instrumentation for sqlalchemy ([#1645](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1645)) +- Fix exception in Urllib3 when dealing with filelike body. + ([#1399](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/1399)) + ### Added - Add connection attributes to sqlalchemy connect span