Merge pull request #261 from dalf/upgrade_httpx

[upd] upgrade httpx 0.19.0
pull/329/head
Alexandre Flament 3 years ago committed by GitHub
commit dc74df3a55
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -7,10 +7,10 @@ lxml==4.6.3
pygments==2.10.0 pygments==2.10.0
python-dateutil==2.8.2 python-dateutil==2.8.2
pyyaml==5.4.1 pyyaml==5.4.1
httpx[http2]==0.17.1 httpx[http2]==0.19.0
Brotli==1.0.9 Brotli==1.0.9
uvloop==0.16.0; python_version >= '3.7' uvloop==0.16.0; python_version >= '3.7'
uvloop==0.14.0; python_version < '3.7' uvloop==0.14.0; python_version < '3.7'
httpx-socks[asyncio]==0.3.1 httpx-socks[asyncio]==0.4.1
langdetect==1.0.9 langdetect==1.0.9
setproctitle==1.2.2 setproctitle==1.2.2

@ -74,9 +74,11 @@ def get_request_exception_messages(exc: HTTPError)\
status_code = None status_code = None
reason = None reason = None
hostname = None hostname = None
if hasattr(exc, 'request') and exc.request is not None: if hasattr(exc, '_request') and exc._request is not None:
# exc.request is a property that raises a RuntimeError
# if exc._request is not defined.
url = exc.request.url url = exc.request.url
if url is None and hasattr(exc, 'response') and exc.respones is not None: if url is None and hasattr(exc, 'response') and exc.response is not None:
url = exc.response.url url = exc.response.url
if url is not None: if url is not None:
hostname = url.host hostname = url.host

@ -166,7 +166,7 @@ async def stream_chunk_to_queue(network, queue, method, url, **kwargs):
async for chunk in response.aiter_raw(65536): async for chunk in response.aiter_raw(65536):
if len(chunk) > 0: if len(chunk) > 0:
queue.put(chunk) queue.put(chunk)
except httpx.ResponseClosed: except httpx.StreamClosed:
# the response was queued before the exception. # the response was queued before the exception.
# the exception was raised on aiter_raw. # the exception was raised on aiter_raw.
# we do nothing here: in the finally block, None will be queued # we do nothing here: in the finally block, None will be queued

@ -5,6 +5,7 @@
import asyncio import asyncio
import logging import logging
import threading import threading
import httpcore import httpcore
import httpx import httpx
from httpx_socks import AsyncProxyTransport from httpx_socks import AsyncProxyTransport
@ -26,19 +27,22 @@ else:
uvloop.install() uvloop.install()
logger = logger.getChild('searx.http.client') logger = logger.getChild('searx.network.client')
LOOP = None LOOP = None
SSLCONTEXTS = {} SSLCONTEXTS = {}
TRANSPORT_KWARGS = { TRANSPORT_KWARGS = {
'backend': 'asyncio', # use anyio :
# * https://github.com/encode/httpcore/issues/344
# * https://github.com/encode/httpx/discussions/1511
'backend': 'anyio',
'trust_env': False, 'trust_env': False,
} }
# pylint: disable=protected-access # pylint: disable=protected-access
async def close_connections_for_url( async def close_connections_for_url(
connection_pool: httpcore.AsyncConnectionPool, connection_pool: httpcore.AsyncConnectionPool, url: httpcore._utils.URL
url: httpcore._utils.URL ): ):
origin = httpcore._utils.url_to_origin(url) origin = httpcore._utils.url_to_origin(url)
logger.debug('Drop connections for %r', origin) logger.debug('Drop connections for %r', origin)
@ -47,7 +51,7 @@ async def close_connections_for_url(
await connection_pool._remove_from_pool(connection) await connection_pool._remove_from_pool(connection)
try: try:
await connection.aclose() await connection.aclose()
except httpcore.NetworkError as e: except httpx.NetworkError as e:
logger.warning('Error closing an existing connection', exc_info=e) logger.warning('Error closing an existing connection', exc_info=e)
# pylint: enable=protected-access # pylint: enable=protected-access
@ -59,80 +63,78 @@ def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http
return SSLCONTEXTS[key] return SSLCONTEXTS[key]
class AsyncHTTPTransportNoHttp(httpcore.AsyncHTTPTransport): class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport):
"""Block HTTP request""" """Block HTTP request"""
async def arequest(self, method, url, headers=None, stream=None, ext=None): async def handle_async_request(
raise httpcore.UnsupportedProtocol("HTTP protocol is disabled") self, method, url, headers=None, stream=None, extensions=None
):
raise httpx.UnsupportedProtocol('HTTP protocol is disabled')
class AsyncProxyTransportFixed(AsyncProxyTransport): class AsyncProxyTransportFixed(AsyncProxyTransport):
"""Fix httpx_socks.AsyncProxyTransport """Fix httpx_socks.AsyncProxyTransport
Map python_socks exceptions to httpcore.ProxyError Map python_socks exceptions to httpx.ProxyError / httpx.ConnectError
Map socket.gaierror to httpcore.ConnectError
Note: keepalive_expiry is ignored, AsyncProxyTransport should call: Map socket.gaierror to httpx.ConnectError
* self._keepalive_sweep()
* self._response_closed(self, connection)
Note: AsyncProxyTransport inherits from AsyncConnectionPool Note: AsyncProxyTransport inherits from AsyncConnectionPool
Note: the API is going to change on httpx 0.18.0
see https://github.com/encode/httpx/pull/1522
""" """
async def arequest(self, method, url, headers=None, stream=None, ext=None): async def handle_async_request(
self, method, url, headers=None, stream=None, extensions=None
):
retry = 2 retry = 2
while retry > 0: while retry > 0:
retry -= 1 retry -= 1
try: try:
return await super().arequest(method, url, headers, stream, ext) return await super().handle_async_request(
method, url, headers=headers, stream=stream, extensions=extensions
)
except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e: except (ProxyConnectionError, ProxyTimeoutError, ProxyError) as e:
raise httpcore.ProxyError(e) raise httpx.ProxyError from e
except OSError as e: except OSError as e:
# socket.gaierror when DNS resolution fails # socket.gaierror when DNS resolution fails
raise httpcore.NetworkError(e) raise httpx.ConnectError from e
except httpcore.RemoteProtocolError as e: except httpx.NetworkError as e:
# in case of httpcore.RemoteProtocolError: Server disconnected # httpx.WriteError on HTTP/2 connection leaves a new opened stream
await close_connections_for_url(self, url)
logger.warning('httpcore.RemoteProtocolError: retry', exc_info=e)
# retry
except (httpcore.NetworkError, httpcore.ProtocolError) as e:
# httpcore.WriteError on HTTP/2 connection leaves a new opened stream
# then each new request creates a new stream and raise the same WriteError # then each new request creates a new stream and raise the same WriteError
await close_connections_for_url(self, url) await close_connections_for_url(self, url)
raise e raise e
except httpx.RemoteProtocolError as e:
# in case of httpx.RemoteProtocolError: Server disconnected
await close_connections_for_url(self, url)
logger.warning('httpx.RemoteProtocolError: retry', exc_info=e)
# retry
class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport): class AsyncHTTPTransportFixed(httpx.AsyncHTTPTransport):
"""Fix httpx.AsyncHTTPTransport""" """Fix httpx.AsyncHTTPTransport"""
async def arequest(self, method, url, headers=None, stream=None, ext=None): async def handle_async_request(
self, method, url, headers=None, stream=None, extensions=None
):
retry = 2 retry = 2
while retry > 0: while retry > 0:
retry -= 1 retry -= 1
try: try:
return await super().arequest(method, url, headers, stream, ext) return await super().handle_async_request(
method, url, headers=headers, stream=stream, extensions=extensions
)
except OSError as e: except OSError as e:
# socket.gaierror when DNS resolution fails # socket.gaierror when DNS resolution fails
raise httpcore.ConnectError(e) raise httpx.ConnectError from e
except httpcore.CloseError as e: except httpx.NetworkError as e:
# httpcore.CloseError: [Errno 104] Connection reset by peer # httpx.WriteError on HTTP/2 connection leaves a new opened stream
# raised by _keepalive_sweep() # then each new request creates a new stream and raise the same WriteError
# from https://github.com/encode/httpcore/blob/4b662b5c42378a61e54d673b4c949420102379f5/httpcore/_backends/asyncio.py#L198 # pylint: disable=line-too-long
await close_connections_for_url(self._pool, url) await close_connections_for_url(self._pool, url)
logger.warning('httpcore.CloseError: retry', exc_info=e) raise e
# retry except httpx.RemoteProtocolError as e:
except httpcore.RemoteProtocolError as e: # in case of httpx.RemoteProtocolError: Server disconnected
# in case of httpcore.RemoteProtocolError: Server disconnected
await close_connections_for_url(self._pool, url) await close_connections_for_url(self._pool, url)
logger.warning('httpcore.RemoteProtocolError: retry', exc_info=e) logger.warning('httpx.RemoteProtocolError: retry', exc_info=e)
# retry # retry
except (httpcore.ProtocolError, httpcore.NetworkError) as e:
await close_connections_for_url(self._pool, url)
raise e
def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries): def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
@ -178,15 +180,6 @@ def get_transport(verify, http2, local_address, proxy_url, limit, retries):
) )
def iter_proxies(proxies):
# https://www.python-httpx.org/compatibility/#proxy-keys
if isinstance(proxies, str):
yield 'all://', proxies
elif isinstance(proxies, dict):
for pattern, proxy_url in proxies.items():
yield pattern, proxy_url
def new_client( def new_client(
# pylint: disable=too-many-arguments # pylint: disable=too-many-arguments
enable_http, verify, enable_http2, enable_http, verify, enable_http2,
@ -199,8 +192,8 @@ def new_client(
) )
# See https://www.python-httpx.org/advanced/#routing # See https://www.python-httpx.org/advanced/#routing
mounts = {} mounts = {}
for pattern, proxy_url in iter_proxies(proxies): for pattern, proxy_url in proxies.items():
if not enable_http and (pattern == 'http' or pattern.startswith('http://')): if not enable_http and pattern.startswith('http://'):
continue continue
if (proxy_url.startswith('socks4://') if (proxy_url.startswith('socks4://')
or proxy_url.startswith('socks5://') or proxy_url.startswith('socks5://')

@ -138,12 +138,10 @@ class Network:
request = response.request request = response.request
status = f"{response.status_code} {response.reason_phrase}" status = f"{response.status_code} {response.reason_phrase}"
response_line = f"{response.http_version} {status}" response_line = f"{response.http_version} {status}"
if hasattr(response, "_elapsed"): content_type = response.headers.get("Content-Type")
elapsed_time = f"{response.elapsed.total_seconds()} sec" content_type = f' ({content_type})' if content_type else ''
else:
elapsed_time = "stream"
self._logger.debug( self._logger.debug(
f'HTTP Request: {request.method} {request.url} "{response_line}" ({elapsed_time})' f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}'
) )
def get_client(self, verify=None, max_redirects=None): def get_client(self, verify=None, max_redirects=None):

Loading…
Cancel
Save