Skip to content

Commit

Permalink
[upd] upgrade httpx 0.19.0
Browse files Browse the repository at this point in the history
adjust searx.network module to the new internal API
see encode/httpx#1522
  • Loading branch information
dalf committed Sep 16, 2021
1 parent 602cbc2 commit c6a61ab
Show file tree
Hide file tree
Showing 4 changed files with 49 additions and 46 deletions.
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@ lxml==4.6.3
pygments==2.10.0
python-dateutil==2.8.2
pyyaml==5.4.1
httpx[http2]==0.17.1
httpx[http2]==0.19.0
Brotli==1.0.9
uvloop==0.16.0; python_version >= '3.7'
uvloop==0.14.0; python_version < '3.7'
httpx-socks[asyncio]==0.3.1
httpx-socks[asyncio]==0.4.1
langdetect==1.0.9
setproctitle==1.2.2
2 changes: 1 addition & 1 deletion searx/network/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,7 @@ async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
async for chunk in response.aiter_raw(65536):
if len(chunk) > 0:
queue.put(chunk)
except httpx.ResponseClosed:
except httpx.StreamClosed:
# the response was queued before the exception.
# the exception was raised on aiter_raw.
# we do nothing here: in the finally block, None will be queued
Expand Down
81 changes: 43 additions & 38 deletions searx/network/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import asyncio
import logging
import threading

import anyio
import httpcore
import httpx
from httpx_socks import AsyncProxyTransport
Expand All @@ -30,15 +32,18 @@
LOOP = None
SSLCONTEXTS = {}
TRANSPORT_KWARGS = {
'backend': 'asyncio',
# use anyio :
# * https://github.com/encode/httpcore/issues/344
# * https://github.com/encode/httpx/discussions/1511
'backend': 'anyio',
'trust_env': False,
}


# pylint: disable=protected-access
async def close_connections_for_url(
connection_pool: httpcore.AsyncConnectionPool,
url: httpcore._utils.URL ):
connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL
):

origin = httpcore._utils.url_to_origin(url)
logger.debug('Drop connections for %r', origin)
Expand All @@ -63,77 +68,77 @@ def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http
class AsyncHTTPTransportNoHttp(httpcore.AsyncHTTPTransport):
"""Block HTTP request"""

async def arequest(self, method, url, headers=None, stream=None, ext=None):
raise httpcore.UnsupportedProtocol("HTTP protocol is disabled")
async def handle_async_request(
self, method, url, headers=None, stream=None, extensions=None
):
raise httpcore.UnsupportedProtocol('HTTP protocol is disabled')


class AsyncProxyTransportFixed(AsyncProxyTransport):
"""Fix httpx_socks.AsyncProxyTransport
Map python_socks exceptions to httpcore.ProxyError
Map python_socks exceptions to httpcore.ProxyError / httpcore.ConnectError
Map socket.gaierror to httpcore.ConnectError
Note: keepalive_expiry is ignored, AsyncProxyTransport should call:
* self._keepalive_sweep()
* self._response_closed(self, connection)
Note: AsyncProxyTransport inherits from AsyncConnectionPool
Note: the API is going to change on httpx 0.18.0
see https://github.com/encode/httpx/pull/1522
"""

async def arequest(self, method, url, headers=None, stream=None, ext=None):
async def handle_async_request(
self, method, url, headers=None, stream=None, extensions=None
):
retry = 2
while retry > 0:
retry -= 1
try:
return await super().arequest(method, url, headers, stream, ext)
return await super().handle_async_request(
method, url, headers=headers, stream=stream, extensions=extensions
)
except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e:
raise httpcore.ProxyError(e)
raise httpcore.ProxyError from e
except OSError as e:
# socket.gaierror when DNS resolution fails
raise httpcore.NetworkError(e)
except httpcore.RemoteProtocolError as e:
raise httpcore.ConnectError from e
except httpcore.NetworkError as e:
# https://github.com/encode/httpcore/pull/313
await close_connections_for_url(self, url)
raise e
except (httpcore.RemoteProtocolError, anyio.BrokenResourceError) as e:
# in case of httpcore.RemoteProtocolError: Server disconnected
# https://github.com/encode/httpcore/pull/129
# or https://github.com/encode/httpcore/issues/382
await close_connections_for_url(self, url)
logger.warning('httpcore.RemoteProtocolError: retry', exc_info=e)
# retry
except (httpcore.NetworkError, httpcore.ProtocolError) as e:
# httpcore.WriteError on HTTP/2 connection leaves a new opened stream
# then each new request creates a new stream and raise the same WriteError
await close_connections_for_url(self, url)
raise e


class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
"""Fix httpx.AsyncHTTPTransport"""

async def arequest(self, method, url, headers=None, stream=None, ext=None):
async def handle_async_request(
self, method, url, headers=None, stream=None, extensions=None
):
retry = 2
while retry > 0:
retry -= 1
try:
return await super().arequest(method, url, headers, stream, ext)
return await super().handle_async_request(
method, url, headers=headers, stream=stream, extensions=extensions
)
except OSError as e:
# socket.gaierror when DNS resolution fails
raise httpcore.ConnectError(e)
except httpcore.CloseError as e:
# httpcore.CloseError: [Errno 104] Connection reset by peer
# raised by _keepalive_sweep()
# from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198 # pylint: disable=line-too-long
await close_connections_for_url(self._pool, url)
logger.warning('httpcore.CloseError: retry', exc_info=e)
# retry
except httpcore.RemoteProtocolError as e:
raise httpcore.ConnectError from e
except httpcore.NetworkError as e:
# https://github.com/encode/httpcore/pull/313
await close_connections_for_url(self, url)
raise e
except (httpcore.RemoteProtocolError, anyio.BrokenResourceError) as e:
# in case of httpcore.RemoteProtocolError: Server disconnected
# https://github.com/encode/httpcore/pull/129
# or https://github.com/encode/httpcore/issues/382
await close_connections_for_url(self._pool, url)
logger.warning('httpcore.RemoteProtocolError: retry', exc_info=e)
# retry
except (httpcore.ProtocolError, httpcore.NetworkError) as e:
await close_connections_for_url(self._pool, url)
raise e


def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
Expand Down Expand Up @@ -206,7 +211,7 @@ def new_client(
if not enable_http and (pattern == 'http' or pattern.startswith('http://')):
continue
if (proxy_url.startswith('socks4://')
or proxy_url.startswith('socks5://')
or proxy_url.startswith('socks5://')
or proxy_url.startswith('socks5h://')
):
mounts[pattern] = get_transport_for_socks_proxy(
Expand Down
8 changes: 3 additions & 5 deletions searx/network/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,12 +138,10 @@ async def log_response(self, response: httpx.Response):
request = response.request
status = f"{response.status_code} {response.reason_phrase}"
response_line = f"{response.http_version} {status}"
if hasattr(response, "_elapsed"):
elapsed_time = f"{response.elapsed.total_seconds()} sec"
else:
elapsed_time = "stream"
content_type = response.headers.get("Content-Type")
content_type = f' ({content_type})' if content_type else ''
self._logger.debug(
f'HTTP Request: {request.method} {request.url} "{response_line}" ({elapsed_time})'
f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}'
)

def get_client(self, verify=None, max_redirects=None):
Expand Down

0 comments on commit c6a61ab

Please sign in to comment.