|
|
@ -1,47 +1,44 @@
|
|
|
|
from __future__ import annotations
|
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
|
|
|
|
import warnings, json, asyncio
|
|
|
|
import warnings
|
|
|
|
|
|
|
|
import json
|
|
|
|
|
|
|
|
import asyncio
|
|
|
|
from functools import partialmethod
|
|
|
|
from functools import partialmethod
|
|
|
|
from asyncio import Future, Queue
|
|
|
|
from asyncio import Future, Queue
|
|
|
|
from typing import AsyncGenerator
|
|
|
|
from typing import AsyncGenerator
|
|
|
|
|
|
|
|
|
|
|
|
from curl_cffi.requests import AsyncSession, Response
|
|
|
|
from curl_cffi.requests import AsyncSession, Response
|
|
|
|
|
|
|
|
|
|
|
|
import curl_cffi
|
|
|
|
import curl_cffi
|
|
|
|
|
|
|
|
|
|
|
|
# Feature-detection flags for the installed curl_cffi version.
# Attribute probes are used instead of parsing a version string, so the
# wrappers below adapt automatically to the API changes introduced in
# curl_cffi 0.5.8 / 0.5.9 / 0.5.10.
is_newer_0_5_8: bool = hasattr(AsyncSession, "_set_cookies") or hasattr(curl_cffi.requests.Cookies, "get_cookies_for_curl")
is_newer_0_5_9: bool = hasattr(curl_cffi.AsyncCurl, "remove_handle")
is_newer_0_5_10: bool = hasattr(AsyncSession, "release_curl")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class StreamResponse:
    """Wrapper around a streaming curl_cffi ``Response`` whose body chunks
    are delivered through an ``asyncio.Queue`` instead of being read eagerly.

    The constructor mirrors the commonly used attributes of the wrapped
    response so callers can treat this object like a regular response.
    """

    def __init__(self, inner: Response, queue: Queue[bytes]) -> None:
        # The underlying curl_cffi response, and the queue that the request
        # side feeds body chunks into (a ``None`` entry marks end-of-stream —
        # see StreamRequest._on_done).
        self.inner: Response = inner
        self.queue: Queue[bytes] = queue
        # Mirror frequently-used attributes of the inner response.
        self.request = inner.request
        self.status_code: int = inner.status_code
        self.reason: str = inner.reason
        self.ok: bool = inner.ok
        self.headers = inner.headers
        self.cookies = inner.cookies
|
|
|
|
|
|
|
|
|
|
|
|
async def text(self) -> str:
    """Read the whole body (draining the stream) and decode it (UTF-8 default)."""
    content: bytes = await self.read()
    return content.decode()
|
|
|
|
|
|
|
|
|
|
|
|
def raise_for_status(self) -> None:
    """Raise ``RuntimeError`` when the response status is not a success."""
    if not self.ok:
        raise RuntimeError(f"HTTP Error {self.status_code}: {self.reason}")
|
|
|
|
|
|
|
|
|
|
|
|
async def json(self, **kwargs) -> dict:
    """Read the whole body and parse it as JSON.

    Extra keyword arguments are forwarded to ``json.loads``.
    """
    return json.loads(await self.read(), **kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
async def iter_lines(self, chunk_size=None, decode_unicode=False, delimiter=None) -> AsyncGenerator[bytes]:
|
|
|
|
async def iter_lines(self, chunk_size=None, decode_unicode=False, delimiter=None) -> AsyncGenerator[bytes, None]:
|
|
|
|
"""
|
|
|
|
pending: bytes = None
|
|
|
|
Copied from: https://requests.readthedocs.io/en/latest/_modules/requests/models/
|
|
|
|
|
|
|
|
which is under the License: Apache 2.0
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
pending = None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async for chunk in self.iter_content(
|
|
|
|
async for chunk in self.iter_content(
|
|
|
|
chunk_size=chunk_size, decode_unicode=decode_unicode
|
|
|
|
chunk_size=chunk_size, decode_unicode=decode_unicode
|
|
|
@ -63,7 +60,7 @@ class StreamResponse:
|
|
|
|
if pending is not None:
|
|
|
|
if pending is not None:
|
|
|
|
yield pending
|
|
|
|
yield pending
|
|
|
|
|
|
|
|
|
|
|
|
async def iter_content(self, chunk_size=None, decode_unicode=False) -> As:
|
|
|
|
async def iter_content(self, chunk_size=None, decode_unicode=False) -> AsyncGenerator[bytes, None]:
|
|
|
|
if chunk_size:
|
|
|
|
if chunk_size:
|
|
|
|
warnings.warn("chunk_size is ignored, there is no way to tell curl that.")
|
|
|
|
warnings.warn("chunk_size is ignored, there is no way to tell curl that.")
|
|
|
|
if decode_unicode:
|
|
|
|
if decode_unicode:
|
|
|
@ -77,22 +74,23 @@ class StreamResponse:
|
|
|
|
async def read(self) -> bytes:
    """Drain the stream via ``iter_content`` and return the joined body bytes."""
    return b"".join([chunk async for chunk in self.iter_content()])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class StreamRequest:
    """A streaming HTTP request built on a curl_cffi ``AsyncSession``.

    The constructor records the request parameters and prepares the queue
    that will receive body chunks; the transfer itself is started elsewhere.
    """

    def __init__(self, session: AsyncSession, method: str, url: str, **kwargs) -> None:
        self.session: AsyncSession = session
        # Prefer the session's own loop if set; otherwise this must be
        # constructed from inside a running event loop.
        self.loop: asyncio.AbstractEventLoop = session.loop if session.loop else asyncio.get_running_loop()
        # Body chunks are delivered here by the transfer callbacks.
        self.queue: Queue[bytes] = Queue()
        self.method: str = method
        self.url: str = url
        # Remaining keyword arguments are stored and forwarded to curl later.
        self.options: dict = kwargs
        # Set once the transfer starts; None means "not started yet".
        self.handle: curl_cffi.AsyncCurl = None
|
|
|
|
|
|
|
|
|
|
|
|
def _on_content(self, data: bytes) -> None:
    """curl write callback: resolve the entry future on first data, then
    hand the chunk to the consumer queue."""
    if not self.enter.done():
        self.enter.set_result(None)
    self.queue.put_nowait(data)
|
|
|
|
|
|
|
|
|
|
|
|
def _on_done(self, task: Future):
|
|
|
|
def _on_done(self, task: Future) -> None:
|
|
|
|
if not self.enter.done():
|
|
|
|
if not self.enter.done():
|
|
|
|
self.enter.set_result(None)
|
|
|
|
self.enter.set_result(None)
|
|
|
|
self.queue.put_nowait(None)
|
|
|
|
self.queue.put_nowait(None)
|
|
|
@ -102,8 +100,8 @@ class StreamRequest:
|
|
|
|
async def fetch(self) -> StreamResponse:
|
|
|
|
async def fetch(self) -> StreamResponse:
|
|
|
|
if self.handle:
|
|
|
|
if self.handle:
|
|
|
|
raise RuntimeError("Request already started")
|
|
|
|
raise RuntimeError("Request already started")
|
|
|
|
self.curl = await self.session.pop_curl()
|
|
|
|
self.curl: curl_cffi.AsyncCurl = await self.session.pop_curl()
|
|
|
|
self.enter = self.loop.create_future()
|
|
|
|
self.enter: asyncio.Future = self.loop.create_future()
|
|
|
|
if is_newer_0_5_10:
|
|
|
|
if is_newer_0_5_10:
|
|
|
|
request, _, header_buffer, _, _ = self.session._set_curl_options(
|
|
|
|
request, _, header_buffer, _, _ = self.session._set_curl_options(
|
|
|
|
self.curl,
|
|
|
|
self.curl,
|
|
|
@ -144,10 +142,10 @@ class StreamRequest:
|
|
|
|
async def __aenter__(self) -> StreamResponse:
    """Start the request when entering the ``async with`` block."""
    return await self.fetch()
|
|
|
|
|
|
|
|
|
|
|
|
async def __aexit__(self, *args) -> None:
    """Return the curl handle to the session when leaving the block."""
    self.release_curl()
|
|
|
|
|
|
|
|
|
|
|
|
def release_curl(self):
|
|
|
|
def release_curl(self) -> None:
|
|
|
|
if is_newer_0_5_10:
|
|
|
|
if is_newer_0_5_10:
|
|
|
|
self.session.release_curl(self.curl)
|
|
|
|
self.session.release_curl(self.curl)
|
|
|
|
return
|
|
|
|
return
|
|
|
@ -162,6 +160,7 @@ class StreamRequest:
|
|
|
|
self.session.push_curl(self.curl)
|
|
|
|
self.session.push_curl(self.curl)
|
|
|
|
self.curl = None
|
|
|
|
self.curl = None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class StreamSession(AsyncSession):
|
|
|
|
class StreamSession(AsyncSession):
|
|
|
|
def request(
|
|
|
|
def request(
|
|
|
|
self,
|
|
|
|
self,
|
|
|
|