diff --git a/docs/advanced.md b/docs/advanced.md index 809d84383c..8329cadfc0 100644 --- a/docs/advanced.md +++ b/docs/advanced.md @@ -471,3 +471,70 @@ If you do need to make HTTPS connections to a local server, for example to test >>> r Response <200 OK> ``` + +## Retries + +Communicating with a peer over a network is by essence subject to errors. HTTPX provides built-in retry functionality to increase the resilience to connection issues. + +Retries are disabled by default. When retries are enabled, HTTPX will retry sending the request up to the specified number of times. This behavior is restricted to **connection failures only**, i.e.: + +* Failures to establish or acquire a connection (`ConnectTimeout`, `PoolTimeout`). +* Failures to keep the connection open (`NetworkError`). + +!!! important + HTTPX will **NOT** retry on failures that aren't related to establishing or maintaining connections. This includes in particular: + + * Errors related to data transfer, such as `ReadTimeout` or `ProtocolError`. + * HTTP error responses (4xx, 5xx), such as `429 Too Many Requests` or `503 Service Unavailable`. + +If HTTPX could not get a response after the specified number of retries, a `TooManyRetries` exception is raised. + +The delay between each retry is increased exponentially to prevent overloading the requested host. + +### Enabling retries + +You can enable retries for a given request: + +```python +# Using the top-level API: +response = httpx.get("https://example.org", retries=3) + +# Using a client instance: +with httpx.Client() as client: + response = client.get("https://example.org", retries=3) +``` + +Or enable them on a client instance, which results in the given `retries` being used as a default for requests made with this client: + +```python +# Retry at most 3 times on connection failures everywhere. +with httpx.Client(retries=3) as client: + # This request now has retries enabled... 
+ response = client.get("https://example.org") +``` + +When using a client with retries enabled, you can still explicitly override or disable retries for a given request: + +```python +with httpx.Client(retries=3) as client: + # Retry at most 5 times for this particular request. + response = client.get("https://example.org", retries=5) + + # Don't retry for this particular request. + response = client.get("https://example.org", retries=None) +``` + +### Fine-tuning the retries configuration + +When enabling retries, the `retries` argument can also be an `httpx.Retries()` instance. It accepts the following arguments: + +* An integer, given as a required positional argument, representing the maximum number of connection failures to retry on. +* `backoff_factor` (optional), which defines the increase rate of the time to wait between retries. By default this is `0.2`, which corresponds to issuing a new request after `(0s, 0.2s, 0.4s, 0.8s, ...)`. (Note that most connection failures are immediately resolved by retrying, so HTTPX will always issue the initial retry right away.) + +```python +# Retry at most 5 times on connection failures everywhere, +# and issue new requests after `(0s, 0.5s, 1s, 2s, 4s, ...)`. +retries = httpx.Retries(5, backoff_factor=0.5) +with httpx.Client(retries=retries) as client: + ... 
+``` diff --git a/httpx/__init__.py b/httpx/__init__.py index 4a133e8efd..092adf315c 100644 --- a/httpx/__init__.py +++ b/httpx/__init__.py @@ -2,7 +2,7 @@ from .api import delete, get, head, options, patch, post, put, request, stream from .auth import Auth, BasicAuth, DigestAuth from .client import AsyncClient, Client -from .config import PoolLimits, Proxy, Timeout +from .config import PoolLimits, Proxy, Retries, Timeout from .dispatch.asgi import ASGIDispatch from .dispatch.wsgi import WSGIDispatch from .exceptions import ( @@ -12,6 +12,7 @@ DecodingError, HTTPError, InvalidURL, + NetworkError, NotRedirectResponse, PoolTimeout, ProtocolError, @@ -25,6 +26,7 @@ StreamConsumed, TimeoutException, TooManyRedirects, + TooManyRetries, WriteTimeout, ) from .models import URL, Cookies, Headers, QueryParams, Request, Response @@ -54,12 +56,15 @@ "PoolLimits", "Proxy", "Timeout", + "Retries", + "TooManyRetries", "ConnectTimeout", "CookieConflict", "ConnectionClosed", "DecodingError", "HTTPError", "InvalidURL", + "NetworkError", "NotRedirectResponse", "PoolTimeout", "ProtocolError", diff --git a/httpx/api.py b/httpx/api.py index 7fbbd30811..50ab9ab5a6 100644 --- a/httpx/api.py +++ b/httpx/api.py @@ -2,7 +2,13 @@ from .auth import AuthTypes from .client import Client, StreamContextManager -from .config import DEFAULT_TIMEOUT_CONFIG, CertTypes, TimeoutTypes, VerifyTypes +from .config import ( + DEFAULT_TIMEOUT_CONFIG, + CertTypes, + RetriesTypes, + TimeoutTypes, + VerifyTypes, +) from .models import ( CookieTypes, HeaderTypes, @@ -26,6 +32,7 @@ def request( headers: HeaderTypes = None, cookies: CookieTypes = None, auth: AuthTypes = None, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, allow_redirects: bool = True, verify: VerifyTypes = True, @@ -54,6 +61,8 @@ def request( request. * **auth** - *(optional)* An authentication class to use when sending the request. 
+ * **retries** - *(optional)* The maximum number of connection failures to + retry on. * **timeout** - *(optional)* The timeout configuration to use when sending the request. * **allow_redirects** - *(optional)* Enables or disables HTTP redirects. @@ -81,7 +90,7 @@ def request( ``` """ with Client( - cert=cert, verify=verify, timeout=timeout, trust_env=trust_env, + cert=cert, verify=verify, retries=retries, timeout=timeout, trust_env=trust_env, ) as client: return client.request( method=method, @@ -108,13 +117,14 @@ def stream( headers: HeaderTypes = None, cookies: CookieTypes = None, auth: AuthTypes = None, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, allow_redirects: bool = True, verify: VerifyTypes = True, cert: CertTypes = None, trust_env: bool = True, ) -> StreamContextManager: - client = Client(cert=cert, verify=verify, trust_env=trust_env) + client = Client(cert=cert, verify=verify, retries=retries, trust_env=trust_env) request = Request( method=method, url=url, @@ -145,6 +155,7 @@ def get( allow_redirects: bool = True, cert: CertTypes = None, verify: VerifyTypes = True, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, ) -> Response: @@ -166,6 +177,7 @@ def get( allow_redirects=allow_redirects, cert=cert, verify=verify, + retries=retries, timeout=timeout, trust_env=trust_env, ) @@ -181,6 +193,7 @@ def options( allow_redirects: bool = True, cert: CertTypes = None, verify: VerifyTypes = True, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, ) -> Response: @@ -202,6 +215,7 @@ def options( allow_redirects=allow_redirects, cert=cert, verify=verify, + retries=retries, timeout=timeout, trust_env=trust_env, ) @@ -217,6 +231,7 @@ def head( allow_redirects: bool = False, # Note: Differs to usual default. 
cert: CertTypes = None, verify: VerifyTypes = True, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, ) -> Response: @@ -240,6 +255,7 @@ def head( allow_redirects=allow_redirects, cert=cert, verify=verify, + retries=retries, timeout=timeout, trust_env=trust_env, ) @@ -258,6 +274,7 @@ def post( allow_redirects: bool = True, cert: CertTypes = None, verify: VerifyTypes = True, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, ) -> Response: @@ -279,6 +296,7 @@ def post( allow_redirects=allow_redirects, cert=cert, verify=verify, + retries=retries, timeout=timeout, trust_env=trust_env, ) @@ -297,6 +315,7 @@ def put( allow_redirects: bool = True, cert: CertTypes = None, verify: VerifyTypes = True, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, ) -> Response: @@ -318,6 +337,7 @@ def put( allow_redirects=allow_redirects, cert=cert, verify=verify, + retries=retries, timeout=timeout, trust_env=trust_env, ) @@ -336,6 +356,7 @@ def patch( allow_redirects: bool = True, cert: CertTypes = None, verify: VerifyTypes = True, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, ) -> Response: @@ -357,6 +378,7 @@ def patch( allow_redirects=allow_redirects, cert=cert, verify=verify, + retries=retries, timeout=timeout, trust_env=trust_env, ) @@ -372,6 +394,7 @@ def delete( allow_redirects: bool = True, cert: CertTypes = None, verify: VerifyTypes = True, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, trust_env: bool = True, ) -> Response: @@ -393,6 +416,7 @@ def delete( allow_redirects=allow_redirects, cert=cert, verify=verify, + retries=retries, timeout=timeout, trust_env=trust_env, ) diff --git a/httpx/backends/asyncio.py b/httpx/backends/asyncio.py index 8d1025748b..7371be7908 100644 --- a/httpx/backends/asyncio.py +++ 
b/httpx/backends/asyncio.py @@ -225,6 +225,9 @@ async def open_uds_stream( return SocketStream(stream_reader=stream_reader, stream_writer=stream_writer) + async def sleep(self, seconds: float) -> None: + await asyncio.sleep(seconds) + def time(self) -> float: loop = asyncio.get_event_loop() return loop.time() diff --git a/httpx/backends/auto.py b/httpx/backends/auto.py index 7a8c597822..935a2804d7 100644 --- a/httpx/backends/auto.py +++ b/httpx/backends/auto.py @@ -41,6 +41,9 @@ async def open_uds_stream( ) -> BaseSocketStream: return await self.backend.open_uds_stream(path, hostname, ssl_context, timeout) + async def sleep(self, seconds: float) -> None: + await self.backend.sleep(seconds) + def time(self) -> float: return self.backend.time() diff --git a/httpx/backends/base.py b/httpx/backends/base.py index 964d09449f..0c01709328 100644 --- a/httpx/backends/base.py +++ b/httpx/backends/base.py @@ -111,6 +111,9 @@ async def open_uds_stream( ) -> BaseSocketStream: raise NotImplementedError() # pragma: no cover + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: no cover + def time(self) -> float: raise NotImplementedError() # pragma: no cover diff --git a/httpx/backends/sync.py b/httpx/backends/sync.py new file mode 100644 index 0000000000..c2804e3f62 --- /dev/null +++ b/httpx/backends/sync.py @@ -0,0 +1,6 @@ +import time + + +class SyncBackend: + def sleep(self, seconds: float) -> None: + time.sleep(seconds) diff --git a/httpx/backends/trio.py b/httpx/backends/trio.py index 33e93e9677..e6bf208d63 100644 --- a/httpx/backends/trio.py +++ b/httpx/backends/trio.py @@ -131,6 +131,9 @@ async def open_uds_stream( raise ConnectTimeout() + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) + def time(self) -> float: return trio.current_time() diff --git a/httpx/client.py b/httpx/client.py index 49914787a8..6909a35fd5 100644 --- a/httpx/client.py +++ b/httpx/client.py @@ -5,7 +5,8 @@ import hstspreload from .auth 
import Auth, AuthTypes, BasicAuth, FunctionAuth -from .backends.base import ConcurrencyBackend +from .backends.base import ConcurrencyBackend, lookup_backend +from .backends.sync import SyncBackend from .config import ( DEFAULT_MAX_REDIRECTS, DEFAULT_POOL_LIMITS, @@ -15,6 +16,8 @@ PoolLimits, ProxiesTypes, Proxy, + Retries, + RetriesTypes, Timeout, TimeoutTypes, UnsetType, @@ -33,6 +36,7 @@ RedirectLoop, RequestBodyUnavailable, TooManyRedirects, + TooManyRetries, ) from .models import ( URL, @@ -63,6 +67,7 @@ def __init__( params: QueryParamTypes = None, headers: HeaderTypes = None, cookies: CookieTypes = None, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, max_redirects: int = DEFAULT_MAX_REDIRECTS, base_url: URLTypes = None, @@ -80,6 +85,7 @@ def __init__( self._params = QueryParams(params) self._headers = Headers(headers) self._cookies = Cookies(cookies) + self.retries = Retries(retries) self.timeout = Timeout(timeout) self.max_redirects = max_redirects self.trust_env = trust_env @@ -418,6 +424,8 @@ class Client(BaseClient): file, key file, password). * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy URLs. + * **retries** - *(optional)* The maximum number of connection failures to + retry on. * **timeout** - *(optional)* The timeout configuration to use when sending requests. 
* **pool_limits** - *(optional)* The connection pool configuration to use @@ -444,6 +452,7 @@ def __init__( verify: VerifyTypes = True, cert: CertTypes = None, proxies: ProxiesTypes = None, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, @@ -457,6 +466,7 @@ def __init__( params=params, headers=headers, cookies=cookies, + retries=retries, timeout=timeout, max_redirects=max_redirects, base_url=base_url, @@ -483,6 +493,7 @@ def __init__( ) for key, proxy in proxy_map.items() } + self.backend = SyncBackend() def init_dispatch( self, @@ -557,6 +568,7 @@ def request( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: request = self.build_request( @@ -570,7 +582,11 @@ def request( cookies=cookies, ) return self.send( - request, auth=auth, allow_redirects=allow_redirects, timeout=timeout, + request, + auth=auth, + allow_redirects=allow_redirects, + retries=retries, + timeout=timeout, ) def send( @@ -580,6 +596,7 @@ def send( stream: bool = False, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: if request.url.scheme not in ("http", "https"): @@ -587,10 +604,16 @@ def send( timeout = self.timeout if isinstance(timeout, UnsetType) else Timeout(timeout) + retries = self.retries if isinstance(retries, UnsetType) else Retries(retries) + auth = self.build_auth(request, auth) - response = self.send_handling_redirects( - request, auth=auth, timeout=timeout, allow_redirects=allow_redirects, + response = self.send_handling_retries( + request, + auth=auth, + retries=retries, + timeout=timeout, + allow_redirects=allow_redirects, ) if not stream: @@ -601,6 +624,48 @@ def 
send( return response + def send_handling_retries( + self, + request: Request, + auth: Auth, + retries: Retries, + timeout: Timeout, + allow_redirects: bool = True, + ) -> Response: + if not retries.limit: + return self.send_handling_redirects( + request, auth=auth, timeout=timeout, allow_redirects=allow_redirects + ) + + backend = self.backend + retries_left = retries.limit + delays = retries.get_delays() + + while True: + try: + return self.send_handling_redirects( + request, + auth=auth, + timeout=timeout, + allow_redirects=allow_redirects, + ) + except HTTPError as exc: + if not retries.should_retry_on_exception(exc): + # Even if we have retries left, we're told to not even consider + # retrying in this case. So let's re-raise immediately to avoid + # polluting logs or the exception stack. + raise + + logger.debug(f"HTTP Request failed: {exc!r}") + + if not retries_left: + raise TooManyRetries(exc, request=request) + + retries_left -= 1 + delay = next(delays) + logger.debug(f"Retrying in {delay} seconds") + backend.sleep(delay) + def send_handling_redirects( self, request: Request, @@ -704,6 +769,7 @@ def get( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return self.request( @@ -714,6 +780,7 @@ def get( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -726,6 +793,7 @@ def options( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return self.request( @@ -736,6 +804,7 @@ def options( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -748,6 +817,7 @@ def head( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = 
False, # NOTE: Differs to usual default. + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return self.request( @@ -758,6 +828,7 @@ def head( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -773,6 +844,7 @@ def post( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return self.request( @@ -786,6 +858,7 @@ def post( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -801,6 +874,7 @@ def put( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return self.request( @@ -814,6 +888,7 @@ def put( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -829,6 +904,7 @@ def patch( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return self.request( @@ -842,6 +918,7 @@ def patch( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -854,6 +931,7 @@ def delete( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return self.request( @@ -864,6 +942,7 @@ def delete( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -915,6 +994,8 @@ class AsyncClient(BaseClient): enabled. Defaults to `False`. 
* **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy URLs. + * **retries** - *(optional)* The maximum number of connection failures to + retry on. * **timeout** - *(optional)* The timeout configuration to use when sending requests. * **pool_limits** - *(optional)* The connection pool configuration to use @@ -946,6 +1027,7 @@ def __init__( cert: CertTypes = None, http2: bool = False, proxies: ProxiesTypes = None, + retries: RetriesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, @@ -961,6 +1043,7 @@ def __init__( params=params, headers=headers, cookies=cookies, + retries=retries, timeout=timeout, max_redirects=max_redirects, base_url=base_url, @@ -1081,6 +1164,7 @@ async def request( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: request = self.build_request( @@ -1094,7 +1178,11 @@ async def request( cookies=cookies, ) response = await self.send( - request, auth=auth, allow_redirects=allow_redirects, timeout=timeout, + request, + auth=auth, + allow_redirects=allow_redirects, + retries=retries, + timeout=timeout, ) return response @@ -1105,6 +1193,7 @@ async def send( stream: bool = False, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: if request.url.scheme not in ("http", "https"): @@ -1112,10 +1201,16 @@ async def send( timeout = self.timeout if isinstance(timeout, UnsetType) else Timeout(timeout) + retries = self.retries if isinstance(retries, UnsetType) else Retries(retries) + auth = self.build_auth(request, auth) - response = await self.send_handling_redirects( - request, auth=auth, timeout=timeout, allow_redirects=allow_redirects, + response = 
await self.send_handling_retries( + request, + auth=auth, + retries=retries, + timeout=timeout, + allow_redirects=allow_redirects, ) if not stream: @@ -1126,6 +1221,49 @@ async def send( return response + async def send_handling_retries( + self, + request: Request, + auth: Auth, + retries: Retries, + timeout: Timeout, + allow_redirects: bool = True, + ) -> Response: + if not retries.limit: + return await self.send_handling_redirects( + request, auth=auth, timeout=timeout, allow_redirects=allow_redirects + ) + + backend = lookup_backend() + + retries_left = retries.limit + delays = retries.get_delays() + + while True: + try: + return await self.send_handling_redirects( + request, + auth=auth, + timeout=timeout, + allow_redirects=allow_redirects, + ) + except HTTPError as exc: + if not retries.should_retry_on_exception(exc): + # Even if we have retries left, we're told to not even consider + # retrying in this case. So let's re-raise immediately to avoid + # polluting logs or the exception stack. 
+ raise + + logger.debug(f"HTTP Request failed: {exc!r}") + + if not retries_left: + raise TooManyRetries(exc, request=request) + + retries_left -= 1 + delay = next(delays) + logger.debug(f"Retrying in {delay} seconds") + await backend.sleep(delay) + async def send_handling_redirects( self, request: Request, @@ -1231,6 +1369,7 @@ async def get( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return await self.request( @@ -1241,6 +1380,7 @@ async def get( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -1253,6 +1393,7 @@ async def options( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return await self.request( @@ -1263,6 +1404,7 @@ async def options( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -1275,6 +1417,7 @@ async def head( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = False, # NOTE: Differs to usual default. 
+ retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return await self.request( @@ -1285,6 +1428,7 @@ async def head( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -1300,6 +1444,7 @@ async def post( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return await self.request( @@ -1313,6 +1458,7 @@ async def post( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -1328,6 +1474,7 @@ async def put( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return await self.request( @@ -1341,6 +1488,7 @@ async def put( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -1356,6 +1504,7 @@ async def patch( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return await self.request( @@ -1369,6 +1518,7 @@ async def patch( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -1381,6 +1531,7 @@ async def delete( cookies: CookieTypes = None, auth: AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, ) -> Response: return await self.request( @@ -1391,6 +1542,7 @@ async def delete( cookies=cookies, auth=auth, allow_redirects=allow_redirects, + retries=retries, timeout=timeout, ) @@ -1417,6 +1569,7 @@ def __init__( *, auth: 
AuthTypes = None, allow_redirects: bool = True, + retries: typing.Union[RetriesTypes, UnsetType] = UNSET, timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, close_client: bool = False, ) -> None: @@ -1424,6 +1577,7 @@ def __init__( self.request = request self.auth = auth self.allow_redirects = allow_redirects + self.retries = retries self.timeout = timeout self.close_client = close_client @@ -1433,6 +1587,7 @@ def __enter__(self) -> "Response": request=self.request, auth=self.auth, allow_redirects=self.allow_redirects, + retries=self.retries, timeout=self.timeout, stream=True, ) @@ -1455,6 +1610,7 @@ async def __aenter__(self) -> "Response": request=self.request, auth=self.auth, allow_redirects=self.allow_redirects, + retries=self.retries, timeout=self.timeout, stream=True, ) diff --git a/httpx/config.py b/httpx/config.py index 64c3c3307d..4f5c89e2bd 100644 --- a/httpx/config.py +++ b/httpx/config.py @@ -1,3 +1,4 @@ +import itertools import os import ssl import typing @@ -6,6 +7,7 @@ import certifi +from .exceptions import ConnectTimeout, HTTPError, NetworkError, PoolTimeout from .models import URL, Headers, HeaderTypes, URLTypes from .utils import get_ca_bundle_from_env, get_logger @@ -17,6 +19,7 @@ ProxiesTypes = typing.Union[ URLTypes, "Proxy", typing.Dict[URLTypes, typing.Union[URLTypes, "Proxy"]] ] +RetriesTypes = typing.Union[None, int, "Retries"] DEFAULT_CIPHERS = ":".join( @@ -352,6 +355,77 @@ def __repr__(self) -> str: ) +class Retries: + """ + Retries configuration. + + Defines the maximum amount of connection failures to retry on, and + implements a configurable exponential backoff algorithm. 
+ """ + + _RETRYABLE_EXCEPTIONS: typing.Sequence[typing.Type[HTTPError]] = ( + ConnectTimeout, + PoolTimeout, + NetworkError, + ) + _RETRYABLE_METHODS: typing.Container[str] = frozenset( + ("HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE") + ) + + def __init__( + self, retries: RetriesTypes = None, *, backoff_factor: float = None + ) -> None: + if retries is None: + limit = 0 + elif isinstance(retries, int): + limit = retries + else: + assert isinstance(retries, Retries) + assert backoff_factor is None + backoff_factor = retries.backoff_factor + limit = retries.limit + + if backoff_factor is None: + backoff_factor = 0.2 + + assert limit >= 0 + assert backoff_factor > 0 + + self.limit: int = limit + self.backoff_factor: float = backoff_factor + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, Retries) + and self.limit == other.limit + and self.backoff_factor == other.backoff_factor + ) + + @classmethod + def should_retry_on_exception(cls, exc: HTTPError) -> bool: + is_retryable_exception_class = any( + isinstance(exc, exc_cls) for exc_cls in cls._RETRYABLE_EXCEPTIONS + ) + + if not is_retryable_exception_class: + return False + + assert exc.request is not None + method = exc.request.method.upper() + if method not in cls._RETRYABLE_METHODS: + return False + + return True + + def get_delays(self) -> typing.Iterator[float]: + """ + Used by clients to determine how long to wait before issuing a new request. + """ + yield 0 # Retry immediately. 
+ for n in itertools.count(2): + yield self.backoff_factor * (2 ** (n - 2)) + + DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) DEFAULT_POOL_LIMITS = PoolLimits(soft_limit=10, hard_limit=100) DEFAULT_MAX_REDIRECTS = 20 diff --git a/httpx/dispatch/urllib3.py b/httpx/dispatch/urllib3.py index 2728170c14..d37f9243ff 100644 --- a/httpx/dispatch/urllib3.py +++ b/httpx/dispatch/urllib3.py @@ -4,7 +4,7 @@ import typing import urllib3 -from urllib3.exceptions import MaxRetryError, SSLError +from urllib3.exceptions import NewConnectionError, SSLError from ..config import ( DEFAULT_POOL_LIMITS, @@ -94,7 +94,7 @@ def send(self, request: Request, timeout: Timeout = None) -> Response: content_length = int(request.headers.get("Content-Length", "0")) body = request.stream if chunked or content_length else None - with as_network_error(MaxRetryError, SSLError, socket.error): + with as_network_error(NewConnectionError, SSLError, socket.error): conn = self.pool.urlopen( method=request.method, url=str(request.url), @@ -102,7 +102,7 @@ def send(self, request: Request, timeout: Timeout = None) -> Response: body=body, redirect=False, assert_same_host=False, - retries=0, + retries=False, preload_content=False, chunked=chunked, timeout=urllib3_timeout, diff --git a/httpx/exceptions.py b/httpx/exceptions.py index 7efe6fb3c9..951636b0b7 100644 --- a/httpx/exceptions.py +++ b/httpx/exceptions.py @@ -113,6 +113,15 @@ class NotRedirectResponse(RedirectError): """ +# Retries... + + +class TooManyRetries(HTTPError): + """ + The maximum number of retries allowed for a request was exceeded. + """ + + # Stream exceptions... 
import typing

from .exceptions import (
    ConnectTimeout,
    HTTPError,
    NetworkError,
    PoolTimeout,
    TooManyRetries,
)
from .models import Request, Response
from .utils import get_logger

logger = get_logger(__name__)


class RetryLimits:
    """
    Base class for retry limiting policies.
    """

    def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
        """
        Execute the retry flow.

        To dispatch a request, you should `yield` it, and prepare for the following
        situations:

        * The request resulted in an `httpx.HTTPError`. If it should be retried on,
        you should make any necessary modifications to the request, and continue
        yielding. If you've exceeded the maximum number of retries, wrap the error
        in `httpx.TooManyRetries()` and raise the result. If it shouldn't be retried
        on, re-`raise` the error as-is.
        * The request went through and resulted in the client sending back a `response`.
        If it should be retried on (e.g. because it is an error response), you
        should make any necessary modifications to the request, and continue yielding.
        Otherwise, `return` to terminate the retry flow.

        Note that modifying the request may cause downstream mechanisms that rely
        on request signing to fail. For example, this could be the case of
        certain authentication schemes.

        A typical pseudo-code implementation based on a while-loop and try/except
        blocks may look like this...

        ```python
        while True:
            try:
                response = yield request
            except httpx.HTTPError as exc:
                # Decide retryability *first*: non-retryable errors must always
                # bubble up as-is, even once the retry budget is exhausted.
                if should_retry_on_exception(exc):
                    if not has_retries_left():
                        raise TooManyRetries(exc)
                    increment_retries_left()
                    # (Optionally modify the request here.)
                    continue
                else:
                    raise
            else:
                if should_retry_on_response(response):
                    # (Optionally modify the request here.)
                    continue
                return
        ```
        """
        raise NotImplementedError

    def __or__(self, other: typing.Any) -> "RetryLimits":
        # Return the `NotImplemented` sentinel (not `raise NotImplementedError`)
        # so Python can try the reflected operand and ultimately raise a regular
        # `TypeError`, per the data-model protocol for binary operators.
        if not isinstance(other, RetryLimits):
            return NotImplemented
        return _OrRetries(self, other)


class _OrRetries(RetryLimits):
    """
    Helper for composing retry limits.

    The left limit gets the first chance to handle each exception or response;
    only outcomes it re-raises (or declines via `StopIteration`) are offered
    to the right limit. `TooManyRetries` from either side always propagates.
    """

    def __init__(self, left: RetryLimits, right: RetryLimits) -> None:
        self.left = left
        self.right = right

    def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
        left_flow = self.left.retry_flow(request)
        right_flow = self.right.retry_flow(request)

        # Prime both sub-flows up to their first `yield`.
        request = next(left_flow)
        request = next(right_flow)

        while True:
            try:
                response = yield request
            except HTTPError as exc:
                try:
                    # NOTE: single-argument `throw(exc)` is equivalent to the
                    # three-argument form (which is deprecated in Python 3.12).
                    request = left_flow.throw(exc)
                except TooManyRetries:
                    raise
                except HTTPError:
                    # Left side declined — offer the failure to the right side.
                    try:
                        request = right_flow.throw(exc)
                    except TooManyRetries:
                        raise
                    except HTTPError:
                        raise
                    else:
                        continue
                else:
                    continue
            else:
                try:
                    request = left_flow.send(response)
                except TooManyRetries:
                    raise
                except StopIteration:
                    # Left side is done with this response — let the right side
                    # decide whether to retry on it.
                    try:
                        request = right_flow.send(response)
                    except TooManyRetries:
                        raise
                    except StopIteration:
                        return
                    else:
                        continue
                else:
                    continue


class DontRetry(RetryLimits):
    """No-op policy: dispatch the initial request once and never retry."""

    def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
        # Send the initial request, and never retry.
        # Don't raise a `TooManyRetries` exception because this should really be
        # a no-op implementation.
        yield request


class RetryOnConnectionFailures(RetryLimits):
    """
    Retry when failing to establish a connection, or when a network
    error occurred.
    """

    # Failures raised before/while holding the connection — the request is
    # presumed not to have been processed, so replaying is acceptable.
    _RETRYABLE_EXCEPTIONS: typing.Sequence[typing.Type[HTTPError]] = (
        ConnectTimeout,
        PoolTimeout,
        NetworkError,
    )
    # Idempotent HTTP methods (safe to replay without side-effect concerns).
    _RETRYABLE_METHODS: typing.Container[str] = frozenset(
        ("HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE")
    )

    def __init__(self, limit: int = 3) -> None:
        assert limit >= 0
        self.limit = limit

    def _should_retry_on_exception(self, exc: HTTPError) -> bool:
        """Return `True` if `exc` is a retryable failure on an idempotent method."""
        for exc_cls in self._RETRYABLE_EXCEPTIONS:
            if isinstance(exc, exc_cls):
                break
        else:
            logger.debug(f"not_retryable exc_type={type(exc)}")
            return False

        assert exc.request is not None
        method = exc.request.method.upper()
        if method not in self._RETRYABLE_METHODS:
            logger.debug(f"not_retryable method={method!r}")
            return False

        return True

    def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
        retries_left = self.limit

        while True:
            try:
                _ = yield request
            except HTTPError as exc:
                # Failed to get a response...

                if not self._should_retry_on_exception(exc):
                    # Non-retryable failures must bubble up unchanged (for other
                    # retry limits involved to handle, or for the client) — even
                    # when the retry budget is exhausted. Checking retryability
                    # first avoids mislabeling e.g. a ReadTimeout as
                    # `TooManyRetries` once `retries_left` hits zero.
                    raise

                if not retries_left:
                    raise TooManyRetries(exc, request=request)

                retries_left -= 1
            else:
                # We managed to get a response without connection/network
                # failures, so we're done here.
                return
"""Tests for the connection-failure retry behavior (`httpx.retries`)."""

import collections
import itertools
import typing

import pytest

import httpx
from httpx.config import TimeoutTypes
from httpx.dispatch.base import AsyncDispatcher
from httpx.retries import DontRetry, RetryOnConnectionFailures


class MockDispatch(AsyncDispatcher):
    """
    Dispatcher double: each known endpoint raises its mapped connection-level
    exception until it has been attempted `succeed_after` times, then returns
    a `200 OK` response.
    """

    # Maps request paths to the exception class they should simulate.
    _ENDPOINTS: typing.Dict[str, typing.Type[httpx.HTTPError]] = {
        "/connect_timeout": httpx.ConnectTimeout,
        "/pool_timeout": httpx.PoolTimeout,
        "/network_error": httpx.NetworkError,
    }

    def __init__(self, succeed_after: int) -> None:
        # Number of failing attempts before an endpoint starts succeeding.
        self.succeed_after = succeed_after
        # Per-path count of failed attempts made so far.
        self.attempts: typing.DefaultDict[str, int] = collections.defaultdict(int)

    async def send(
        self, request: httpx.Request, timeout: TimeoutTypes = None
    ) -> httpx.Response:
        assert request.url.path in self._ENDPOINTS

        exc_cls = self._ENDPOINTS[request.url.path]

        # Keep failing until this endpoint has been attempted enough times.
        if self.attempts[request.url.path] < self.succeed_after:
            self.attempts[request.url.path] += 1
            raise exc_cls(request=request)

        return httpx.Response(httpx.codes.OK, request=request)


@pytest.mark.usefixtures("async_environment")
async def test_no_retries() -> None:
    # With `retries=0`, every connection failure surfaces immediately.
    client = httpx.AsyncClient(dispatch=MockDispatch(succeed_after=3), retries=0)

    with pytest.raises(httpx.ConnectTimeout):
        await client.get("https://example.com/connect_timeout")

    with pytest.raises(httpx.PoolTimeout):
        await client.get("https://example.com/pool_timeout")

    with pytest.raises(httpx.NetworkError):
        await client.get("https://example.com/network_error")


@pytest.mark.usefixtures("async_environment")
async def test_default_retries() -> None:
    # The client's default retry limit absorbs 3 consecutive failures.
    client = httpx.AsyncClient(dispatch=MockDispatch(succeed_after=3))

    response = await client.get("https://example.com/connect_timeout")
    assert response.status_code == 200

    response = await client.get("https://example.com/pool_timeout")
    assert response.status_code == 200

    response = await client.get("https://example.com/network_error")
    assert response.status_code == 200


@pytest.mark.usefixtures("async_environment")
async def test_too_many_retries() -> None:
    # One retry is not enough to outlast two failures → `TooManyRetries`.
    client = httpx.AsyncClient(dispatch=MockDispatch(succeed_after=2), retries=1)

    with pytest.raises(httpx.TooManyRetries):
        await client.get("https://example.com/connect_timeout")

    with pytest.raises(httpx.TooManyRetries):
        await client.get("https://example.com/pool_timeout")

    with pytest.raises(httpx.TooManyRetries):
        await client.get("https://example.com/network_error")


@pytest.mark.usefixtures("async_environment")
@pytest.mark.parametrize("method", ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"])
async def test_retries_idempotent_methods(method: str) -> None:
    # Idempotent methods are retried on connection failures.
    client = httpx.AsyncClient(dispatch=MockDispatch(succeed_after=1))
    response = await client.request(method, "https://example.com/connect_timeout")
    assert response.status_code == 200


@pytest.mark.usefixtures("async_environment")
async def test_no_retries_non_idempotent_methods() -> None:
    # POST/PATCH are never retried: the original exception propagates.
    client = httpx.AsyncClient(dispatch=MockDispatch(succeed_after=1))

    with pytest.raises(httpx.ConnectTimeout):
        await client.post("https://example.com/connect_timeout")

    with pytest.raises(httpx.PoolTimeout):
        await client.patch("https://example.com/pool_timeout")


@pytest.mark.parametrize(
    "retries, delays",
    [
        # First retry is immediate; subsequent delays grow exponentially
        # from `backoff_factor`.
        (httpx.Retries(), [0, 0, 0.2, 0.4, 0.8, 1.6]),
        (httpx.Retries(backoff_factor=0.1), [0, 0, 0.1, 0.2, 0.4, 0.8]),
    ],
)
def test_retries_delays_sequence(
    retries: httpx.Retries, delays: typing.List[int]
) -> None:
    sample_delays = list(itertools.islice(retries.get_delays(), 6))
    assert sample_delays == delays


@pytest.mark.usefixtures("async_environment")
@pytest.mark.parametrize(
    "retries, elapsed",
    [
        # Expected elapsed time is the sum of the first four delays.
        (httpx.Retries(), pytest.approx(0 + 0 + 0.2 + 0.4, rel=0.1)),
        (httpx.Retries(backoff_factor=0.1), pytest.approx(0 + 0 + 0.1 + 0.2, rel=0.2)),
    ],
)
async def test_retries_backoff(retries: httpx.Retries, elapsed: float) -> None:
    client = httpx.AsyncClient(dispatch=MockDispatch(succeed_after=3), retries=retries)
    response = await client.get("https://example.com/connect_timeout")
    assert response.status_code == 200
    assert response.elapsed.total_seconds() == elapsed


def test_retries_config() -> None:
    # Default: 3 retries on connection failures, backoff factor 0.2.
    client = httpx.AsyncClient()
    assert client.retries == httpx.Retries() == httpx.Retries(3)
    assert client.retries.limits == RetryOnConnectionFailures(3)
    assert client.retries.backoff_factor == 0.2

    # `retries=0` maps to the no-op `DontRetry` policy.
    client = httpx.AsyncClient(retries=0)
    assert client.retries == httpx.Retries(0)
    assert client.retries.limits == DontRetry()

    # A full `Retries` instance is passed through unchanged.
    client = httpx.AsyncClient(retries=httpx.Retries(2, backoff_factor=0.1))
    assert client.retries == httpx.Retries(2, backoff_factor=0.1)
    assert client.retries.limits == RetryOnConnectionFailures(2)
    assert client.retries.backoff_factor == 0.1


# TODO: test custom retry flow that retries on responses.