Skip to content

Commit

Permalink
Merge pull request #14 from johtso/remove-print-statements
Browse files Browse the repository at this point in the history
Replace print statements with logging
  • Loading branch information
johtso committed Mar 18, 2022
2 parents 0a3a367 + e9626d3 commit b5e6a2b
Show file tree
Hide file tree
Showing 4 changed files with 17 additions and 8 deletions.
5 changes: 4 additions & 1 deletion httpx_caching/_async/_transport.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
from typing import Iterable, Optional, Tuple

import httpx
Expand All @@ -9,6 +10,8 @@
from httpx_caching._policy import CachingPolicy, Source
from httpx_caching._utils import ByteStreamWrapper

logger = logging.getLogger(__name__)


class AsyncCachingTransport(httpx.AsyncBaseTransport):
invalidating_methods = {"PUT", "PATCH", "DELETE"}
Expand Down Expand Up @@ -118,7 +121,7 @@ def wrap_response_stream(
response.stream = wrapped_stream

async def callback(response_body: bytes):
    # Fired by ByteStreamWrapper once the full response body has streamed.
    # NOTE: logging is not print() — extra positional args are %-format
    # arguments, so the message needs a %s placeholder or the logging
    # machinery raises a formatting error and the key is never logged.
    logger.debug("saving to cache: %s", key)
    # key/response/vary_header_values are closed over from the enclosing
    # wrap_response_stream scope (not visible in this hunk).
    await self.cache.aset(key, response, vary_header_values, response_body)

response.stream.callback = callback
Expand Down
4 changes: 2 additions & 2 deletions httpx_caching/_policy.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ def caching_policy(
cached_response, evaluation = yield from try_from_cache_policy(
request, cacheable_methods
)
print(f"evaluation: {evaluation}")
logger.debug("evaluation: %s", evaluation)
if cached_response and evaluation == Evaluation.GOOD:
return cached_response, Source.CACHE

Expand Down Expand Up @@ -271,7 +271,7 @@ def try_from_server_policy(
cacheable_methods: Iterable[str],
) -> Generator[IOAction, Response, Tuple[Response, Source]]:
cache_key = get_cache_key(request)
print("we have this from the cache:", cached_response)
logger.debug("we have this from the cache: %s", cached_response)
updated_headers = request.headers.copy()
if cached_response:
# Add conditional headers based on cached response
Expand Down
5 changes: 4 additions & 1 deletion httpx_caching/_sync/_transport.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
from typing import Iterable, Optional, Tuple

import httpx
Expand All @@ -9,6 +10,8 @@
from httpx_caching._policy import CachingPolicy, Source
from httpx_caching._utils import ByteStreamWrapper

logger = logging.getLogger(__name__)


class SyncCachingTransport(httpx.BaseTransport):
invalidating_methods = {"PUT", "PATCH", "DELETE"}
Expand Down Expand Up @@ -116,7 +119,7 @@ def wrap_response_stream(
response.stream = wrapped_stream

def callback(response_body: bytes):
    # Sync twin of the async transport's callback: persist the fully
    # streamed body to the cache.
    # Use lazy %-formatting — logging does not join positional args the
    # way print() does; without a %s placeholder this call raises inside
    # the logging handler instead of logging the key.
    logger.debug("saving to cache: %s", key)
    # key/response/vary_header_values come from the enclosing
    # wrap_response_stream scope (not visible in this hunk).
    self.cache.set(key, response, vary_header_values, response_body)

response.stream.callback = callback
Expand Down
11 changes: 7 additions & 4 deletions httpx_caching/_utils.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import logging
import threading
from typing import (
AsyncIterator,
Expand All @@ -14,6 +15,8 @@
import anyio
import httpx

logger = logging.getLogger(__name__)

AsyncLock = anyio.Lock
SyncLock = threading.Lock

Expand Down Expand Up @@ -77,9 +80,9 @@ async def async_callback_generator(
try:
yielded = next(gen)
while True:
print("action:", yielded)
logger.debug("action: %s", yielded)
to_send = await callback(yielded)
logger.debug("result: %s", to_send)
yielded = gen.send(to_send)
except StopIteration as e:
return e.value
Expand All @@ -94,9 +97,9 @@ def sync_callback_generator(
try:
yielded = next(gen)
while True:
print("action:", yielded)
logger.debug("action: %s", yielded)
to_send = callback(yielded)
logger.debug("result: %s", to_send)
yielded = gen.send(to_send)
except StopIteration as e:
return e.value
Expand Down

0 comments on commit b5e6a2b

Please sign in to comment.