diff --git a/httpx_caching/_async/_transport.py b/httpx_caching/_async/_transport.py
index 3e26cb2..3d76c45 100644
--- a/httpx_caching/_async/_transport.py
+++ b/httpx_caching/_async/_transport.py
@@ -1,3 +1,4 @@
+import logging
 from typing import Iterable, Optional, Tuple
 
 import httpx
@@ -9,6 +10,8 @@
 from httpx_caching._policy import CachingPolicy, Source
 from httpx_caching._utils import ByteStreamWrapper
 
+logger = logging.getLogger(__name__)
+
 
 class AsyncCachingTransport(httpx.AsyncBaseTransport):
     invalidating_methods = {"PUT", "PATCH", "DELETE"}
@@ -118,7 +121,7 @@ def wrap_response_stream(
         response.stream = wrapped_stream
 
         async def callback(response_body: bytes):
-            print("saving to cache:", key)
+            logger.debug("saving to cache: %s", key)
             await self.cache.aset(key, response, vary_header_values, response_body)
 
         response.stream.callback = callback
diff --git a/httpx_caching/_policy.py b/httpx_caching/_policy.py
index 79450b5..df0bd5e 100644
--- a/httpx_caching/_policy.py
+++ b/httpx_caching/_policy.py
@@ -126,7 +126,7 @@ def caching_policy(
     cached_response, evaluation = yield from try_from_cache_policy(
         request, cacheable_methods
     )
-    print(f"evaluation: {evaluation}")
+    logger.debug("evaluation: %s", evaluation)
     if cached_response and evaluation == Evaluation.GOOD:
         return cached_response, Source.CACHE
 
@@ -271,7 +271,7 @@ def try_from_server_policy(
     cacheable_methods: Iterable[str],
 ) -> Generator[IOAction, Response, Tuple[Response, Source]]:
     cache_key = get_cache_key(request)
-    print("we have this from the cache:", cached_response)
+    logger.debug("we have this from the cache: %s", cached_response)
     updated_headers = request.headers.copy()
     if cached_response:
         # Add conditional headers based on cached response
diff --git a/httpx_caching/_sync/_transport.py b/httpx_caching/_sync/_transport.py
index a79971e..cdadd90 100644
--- a/httpx_caching/_sync/_transport.py
+++ b/httpx_caching/_sync/_transport.py
@@ -1,3 +1,4 @@
+import logging
 from typing import Iterable, Optional, Tuple
 
 import httpx
@@ -9,6 +10,8 @@
 from httpx_caching._policy import CachingPolicy, Source
 from httpx_caching._utils import ByteStreamWrapper
 
+logger = logging.getLogger(__name__)
+
 
 class SyncCachingTransport(httpx.BaseTransport):
     invalidating_methods = {"PUT", "PATCH", "DELETE"}
@@ -116,7 +119,7 @@ def wrap_response_stream(
         response.stream = wrapped_stream
 
         def callback(response_body: bytes):
-            print("saving to cache:", key)
+            logger.debug("saving to cache: %s", key)
             self.cache.set(key, response, vary_header_values, response_body)
 
         response.stream.callback = callback
diff --git a/httpx_caching/_utils.py b/httpx_caching/_utils.py
index 460f0be..86fd9cb 100644
--- a/httpx_caching/_utils.py
+++ b/httpx_caching/_utils.py
@@ -1,3 +1,4 @@
+import logging
 import threading
 from typing import (
     AsyncIterator,
@@ -14,6 +15,8 @@
 import anyio
 import httpx
 
+logger = logging.getLogger(__name__)
+
 AsyncLock = anyio.Lock
 SyncLock = threading.Lock
 
@@ -77,9 +80,9 @@ async def async_callback_generator(
     try:
         yielded = next(gen)
         while True:
-            print("action:", yielded)
+            logger.debug("action: %s", yielded)
             to_send = await callback(yielded)
-            print("result:", to_send)
+            logger.debug("result: %s", to_send)
             yielded = gen.send(to_send)
     except StopIteration as e:
         return e.value
@@ -94,9 +97,9 @@ def sync_callback_generator(
     try:
         yielded = next(gen)
         while True:
-            print("action:", yielded)
+            logger.debug("action: %s", yielded)
             to_send = callback(yielded)
-            print("result:", to_send)
+            logger.debug("result: %s", to_send)
             yielded = gen.send(to_send)
     except StopIteration as e:
         return e.value