Add aiohttp_socks support

pull/1510/head
Heiner Lohaus 5 months ago
parent 91feb34054
commit 8864b70ee4

@@ -126,6 +126,7 @@ def analyze_code(pull: PullRequest, diff: str)-> list[dict]:
for line in diff.split('\n'):
if line.startswith('+++ b/'):
current_file_path = line[6:]
changed_lines = []
elif line.startswith('@@'):
match = re.search(r'\+([0-9]+?),', line)
if match:
@@ -137,9 +138,10 @@ def analyze_code(pull: PullRequest, diff: str)-> list[dict]:
for review in response.get('reviews', []):
review['path'] = current_file_path
comments.append(review)
changed_lines = []
current_file_path = None
elif not line.startswith('-'):
elif line.startswith('-'):
changed_lines.append(line)
else:
changed_lines.append(f"{offset_line}:{line}")
offset_line += 1
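
The flipped condition above means lines removed in a diff (those starting with `-`) are now collected without a line-number prefix, while added and context lines are recorded as `offset_line:line` and advance the counter. A minimal sketch of that branch in isolation (the starting value is illustrative; in the script it is parsed from the `@@` hunk header):

# Sketch of the corrected classification; offset_line normally comes from the '@@' header.
changed_lines, offset_line = [], 138
for line in ["+added line", " context line", "-removed line"]:
    if line.startswith('-'):
        changed_lines.append(line)                      # removed lines keep no new-file number
    else:
        changed_lines.append(f"{offset_line}:{line}")   # added/context lines are numbered
        offset_line += 1                                # counter only advances for new-file lines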

@@ -6,7 +6,7 @@ import os
import uuid
import time
from urllib import parse
from aiohttp import ClientSession, ClientTimeout
from aiohttp import ClientSession, ClientTimeout, BaseConnector
from ..typing import AsyncResult, Messages, ImageType
from ..image import ImageResponse
@@ -39,6 +39,7 @@ class Bing(AsyncGeneratorProvider):
proxy: str = None,
timeout: int = 900,
cookies: dict = None,
connector: BaseConnector = None,
tone: str = Tones.balanced,
image: ImageType = None,
web_search: bool = False,
@@ -67,8 +68,15 @@
cookies = {**Defaults.cookies, **cookies} if cookies else Defaults.cookies
gpt4_turbo = True if model.startswith("gpt-4-turbo") else False
if proxy and not connector:
try:
from aiohttp_socks import ProxyConnector
connector = ProxyConnector.from_url(proxy)
except ImportError:
raise RuntimeError('Install "aiohttp_socks" package for proxy support')
return stream_generate(prompt, tone, image, context, proxy, cookies, web_search, gpt4_turbo, timeout)
return stream_generate(prompt, tone, image, context, cookies, connector, web_search, gpt4_turbo, timeout)
def create_context(messages: Messages) -> str:
"""
@@ -253,8 +261,8 @@ async def stream_generate(
tone: str,
image: ImageType = None,
context: str = None,
proxy: str = None,
cookies: dict = None,
connector: BaseConnector = None,
web_search: bool = False,
gpt4_turbo: bool = False,
timeout: int = 900
@@ -266,7 +274,6 @@ async def stream_generate(
:param tone: The desired tone for the response.
:param image: The image type involved in the response.
:param context: Additional context for the prompt.
:param proxy: Proxy settings for the request.
:param cookies: Cookies for the session.
:param web_search: Flag to enable web search.
:param gpt4_turbo: Flag to enable GPT-4 Turbo.
@@ -278,10 +285,10 @@ async def stream_generate(
headers["Cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
async with ClientSession(
timeout=ClientTimeout(total=timeout), headers=headers
timeout=ClientTimeout(total=timeout), headers=headers, connector=connector
) as session:
conversation = await create_conversation(session, proxy)
image_response = await upload_image(session, image, tone, proxy) if image else None
conversation = await create_conversation(session)
image_response = await upload_image(session, image, tone) if image else None
if image_response:
yield image_response
@@ -289,8 +296,7 @@ async def stream_generate(
async with session.ws_connect(
'wss://sydney.bing.com/sydney/ChatHub',
autoping=False,
params={'sec_access_token': conversation.conversationSignature},
proxy=proxy
params={'sec_access_token': conversation.conversationSignature}
) as wss:
await wss.send_str(format_message({'protocol': 'json', 'version': 1}))
await wss.receive(timeout=timeout)
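
The `proxy=proxy` argument disappears from `ws_connect` because the proxy now lives in the session-level connector: aiohttp opens the WebSocket through the session's connector, so a SOCKS `ProxyConnector` tunnels the `wss://` traffic as well. A minimal sketch (URLs are placeholders):

# Sketch: the session's connector also carries WebSocket connections (placeholder URLs).
from aiohttp import ClientSession
from aiohttp_socks import ProxyConnector

async def connect_ws(url: str = "wss://example.com/hub", proxy: str = "socks5://127.0.0.1:1080"):
    connector = ProxyConnector.from_url(proxy)
    async with ClientSession(connector=connector) as session:
        async with session.ws_connect(url, autoping=False) as wss:
            ...  # no per-call proxy argument needed; traffic goes through the connector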
@@ -322,7 +328,7 @@ async def stream_generate(
elif message.get('contentType') == "IMAGE":
prompt = message.get('text')
try:
image_response = ImageResponse(await create_images(session, prompt, proxy), prompt)
image_response = ImageResponse(await create_images(session, prompt), prompt)
except:
response_txt += f"\nhttps://www.bing.com/images/create?q={parse.quote(prompt)}"
final = True
@@ -342,4 +348,4 @@ async def stream_generate(
raise Exception(f"{result['value']}: {result['message']}")
return
finally:
await delete_conversation(session, conversation, proxy)
await delete_conversation(session, conversation)
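
Taken together, Bing callers can now either pass `proxy=` and let the provider build a `ProxyConnector`, or supply their own `connector=`. An illustrative direct call (model name and proxy address are placeholders):

# Illustrative usage; model name and proxy address are placeholders.
import asyncio
from g4f.Provider import Bing

async def main():
    messages = [{"role": "user", "content": "Hello"}]
    async for chunk in Bing.create_async_generator(
        "gpt-4", messages, proxy="socks5://127.0.0.1:1080"   # or connector=ProxyConnector.from_url(...)
    ):
        print(chunk, end="")

asyncio.run(main())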

@@ -2,7 +2,7 @@ from __future__ import annotations
import json, uuid
from aiohttp import ClientSession
from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -33,6 +33,7 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
messages: Messages,
stream: bool = True,
proxy: str = None,
connector: BaseConnector = None,
web_search: bool = False,
cookies: dict = None,
**kwargs
@@ -43,9 +44,16 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
headers = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
}
if proxy and not connector:
try:
from aiohttp_socks import ProxyConnector
connector = ProxyConnector.from_url(proxy)
except ImportError:
raise RuntimeError('Install "aiohttp_socks" package for proxy support')
async with ClientSession(
cookies=cookies,
headers=headers
headers=headers,
connector=connector
) as session:
async with session.post(f"{cls.url}/conversation", json={"model": cls.get_model(model)}, proxy=proxy) as response:
conversation_id = (await response.json())["conversationId"]
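
As with Bing, a caller can skip the fallback by building the connector up front; a sketch (the proxy URL is a placeholder and the default model attribute is assumed from `ProviderModelMixin`):

# Sketch: supply a ready-made connector so the ImportError fallback above is never reached.
from aiohttp_socks import ProxyConnector
from g4f.Provider import HuggingChat

connector = ProxyConnector.from_url("socks5://user:pass@127.0.0.1:1080")   # placeholder URL
gen = HuggingChat.create_async_generator(
    HuggingChat.default_model,                     # assumes ProviderModelMixin's default_model
    [{"role": "user", "content": "Hi"}],
    connector=connector,
)
# iterate `gen` inside an async function, as in the Bing example above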

@@ -2,7 +2,7 @@ from __future__ import annotations
import uuid
from aiohttp import ClientSession
from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -91,6 +91,7 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
messages: Messages,
auth: str = None,
proxy: str = None,
connector: BaseConnector = None,
**kwargs
) -> AsyncResult:
headers = {
@@ -100,9 +101,16 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
"referer": f"{cls.url}/",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
}
if proxy and not connector:
try:
from aiohttp_socks import ProxyConnector
connector = ProxyConnector.from_url(proxy)
except ImportError:
raise RuntimeError('Install "aiohttp_socks" package for proxy support')
async with ClientSession(
headers=headers,
cookie_jar=cls._cookie_jar
cookie_jar=cls._cookie_jar,
connector=connector
) as session:
cls._auth_code = auth if isinstance(auth, str) else cls._auth_code
if not cls._auth_code:
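
The proxy string handed to `ProxyConnector.from_url` is not limited to SOCKS5: aiohttp_socks (via python-socks) infers the proxy type from the URL scheme. For example (addresses are placeholders):

# The proxy type is inferred from the URL scheme (placeholder addresses).
from aiohttp_socks import ProxyConnector

ProxyConnector.from_url("socks5://127.0.0.1:1080")
ProxyConnector.from_url("socks4://127.0.0.1:1080")
ProxyConnector.from_url("http://127.0.0.1:8080")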

@@ -2,7 +2,7 @@ from __future__ import annotations
import random
import json
from aiohttp import ClientSession
from aiohttp import ClientSession, BaseConnector
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -32,6 +32,7 @@ class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
model: str,
messages: Messages,
proxy: str = None,
connector: BaseConnector = None,
**kwargs
) -> AsyncResult:
headers = {
@@ -47,7 +48,13 @@
"Sec-Fetch-Site": "same-site",
"TE": "trailers",
}
async with ClientSession(headers=headers) as session:
if proxy and not connector:
try:
from aiohttp_socks import ProxyConnector
connector = ProxyConnector.from_url(proxy)
except ImportError:
raise RuntimeError('Install "aiohttp_socks" package for proxy support')
async with ClientSession(headers=headers, connector=connector) as session:
t = format(random.getrandbits(32), '08x')
async with session.get(
f"{API_URL}?EIO=4&transport=polling&t={t}",

@@ -7,7 +7,7 @@ import asyncio
import time
import json
import os
from aiohttp import ClientSession
from aiohttp import ClientSession, BaseConnector
from bs4 import BeautifulSoup
from urllib.parse import quote
from typing import Generator, List, Dict
@@ -50,7 +50,7 @@ def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None:
raise RuntimeError("Timeout error")
time.sleep(0.5)
def create_session(cookies: Dict[str, str]) -> ClientSession:
def create_session(cookies: Dict[str, str], proxy: str = None, connector: BaseConnector = None) -> ClientSession:
"""
Creates a new client session with specified cookies and headers.
@@ -79,7 +79,13 @@ def create_session(cookies: Dict[str, str]) -> ClientSession:
}
if cookies:
headers["Cookie"] = "; ".join(f"{k}={v}" for k, v in cookies.items())
return ClientSession(headers=headers)
if proxy and not connector:
try:
from aiohttp_socks import ProxyConnector
connector = ProxyConnector.from_url(proxy)
except ImportError:
raise RuntimeError('Install "aiohttp_socks" package for proxy support')
return ClientSession(headers=headers, connector=connector)
async def create_images(session: ClientSession, prompt: str, proxy: str = None, timeout: int = TIMEOUT_IMAGE_CREATION) -> List[str]:
"""
@@ -214,7 +220,8 @@ class CreateImagesBing:
cookies = self.cookies or get_cookies(".bing.com")
if "_U" not in cookies:
raise RuntimeError('"_U" cookie is missing')
async with create_session(cookies) as session:
proxy = os.environ.get("G4F_PROXY")
async with create_session(cookies, proxy) as session:
images = await create_images(session, prompt, self.proxy)
return ImageResponse(images, prompt)
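
For the image path the proxy now comes from the `G4F_PROXY` environment variable rather than a per-call argument; setting it before generating images routes the Bing requests accordingly (address is a placeholder):

# Illustrative: the variable read above feeds create_session(cookies, proxy).
import os
os.environ["G4F_PROXY"] = "socks5://127.0.0.1:1080"   # placeholder address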
