Add Phind Provider

Add release_curl in curl_cffi requests
Support image creation responses in Bing
Heiner Lohaus 2023-10-02 17:01:15 +02:00
parent a4da030d67
commit ff4d9ae584
4 changed files with 108 additions and 13 deletions

g4f/Provider/Bing.py

@@ -3,6 +3,7 @@ from __future__ import annotations
 import random
 import json
 import os
+import urllib.parse
 from aiohttp import ClientSession, ClientTimeout
 from ..typing import AsyncGenerator
 from .base_provider import AsyncGeneratorProvider, get_cookies
@@ -245,7 +246,6 @@ async def stream_generate(
         await wss.send_str(create_message(conversation, prompt, context))

         response_txt = ''
-        result_text = ''
         returned_text = ''
         final = False
@@ -260,14 +260,18 @@ async def stream_generate(
         if response.get('type') == 1 and response['arguments'][0].get('messages'):
             message = response['arguments'][0]['messages'][0]
             if (message['contentOrigin'] != 'Apology'):
-                response_txt = result_text + \
-                    message['adaptiveCards'][0]['body'][0].get('text', '')
-                if message.get('messageType'):
-                    inline_txt = message['adaptiveCards'][0]['body'][0]['inlines'][0].get('text')
-                    response_txt += inline_txt + '\n'
-                    result_text += inline_txt + '\n'
+                if 'adaptiveCards' in message:
+                    card = message['adaptiveCards'][0]['body'][0]
+                    if "text" in card:
+                        response_txt = card.get('text')
+                    if message.get('messageType'):
+                        inline_txt = card['inlines'][0].get('text')
+                        response_txt += inline_txt + '\n'
+                elif message.get('contentType') == "IMAGE":
+                    query = urllib.parse.quote(message.get('text'))
+                    url = f"\nhttps://www.bing.com/images/create?q={query}"
+                    response_txt += url
+                    final = True
             if response_txt.startswith(returned_text):
                 new = response_txt[len(returned_text):]
                 if new != "\n":
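
The new IMAGE branch does not fetch an image itself; it URL-encodes the message text, appends a Bing image-creation link to the streamed answer, and marks the stream as final. A minimal sketch of that URL construction, with a made-up prompt string standing in for message.get('text'):

import urllib.parse

prompt = "a watercolor fox in the snow"   # hypothetical prompt text
query = urllib.parse.quote(prompt)        # percent-encode for the q= parameter
print(f"https://www.bing.com/images/create?q={query}")
# -> https://www.bing.com/images/create?q=a%20watercolor%20fox%20in%20the%20snow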

g4f/Provider/Phind.py (new file)

@@ -0,0 +1,76 @@
from __future__ import annotations

import random
from datetime import datetime

from ..typing import AsyncGenerator
from ..requests import StreamSession
from .base_provider import AsyncGeneratorProvider, format_prompt


class Phind(AsyncGeneratorProvider):
    url = "https://www.phind.com"
    working = True
    supports_gpt_4 = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: list[dict[str, str]],
        proxy: str = None,
        **kwargs
    ) -> AsyncGenerator:
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789'
        user_id = ''.join(random.choice(chars) for _ in range(24))
        data = {
            "question": format_prompt(messages),
            "webResults": [],
            "options": {
                "date": datetime.now().strftime("%d.%m.%Y"),
                "language": "en",
                "detailed": True,
                "anonUserId": user_id,
                "answerModel": "GPT-4",
                "creativeMode": False,
                "customLinks": []
            },
            "context": ""
        }
        headers = {
            "Authority": cls.url,
            "Accept": "application/json, text/plain, */*",
            "Origin": cls.url,
            "Referer": f"{cls.url}/"
        }
        async with StreamSession(headers=headers, timeout=(5, 180), proxies={"https": proxy}, impersonate="chrome107") as session:
            async with session.post(f"{cls.url}/api/infer/answer", json=data) as response:
                response.raise_for_status()
                new_lines = 0
                async for line in response.iter_lines():
                    if not line:
                        continue
                    if line.startswith(b"data: "):
                        line = line[6:]
                        if line.startswith(b"<PHIND_METADATA>"):
                            continue
                        if line:
                            if new_lines:
                                yield "".join(["\n" for _ in range(int(new_lines / 2))])
                                new_lines = 0
                            yield line.decode()
                        else:
                            new_lines += 1

    @classmethod
    @property
    def params(cls):
        params = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("stream", "bool"),
            ("proxy", "str"),
        ]
        param = ", ".join([": ".join(p) for p in params])
        return f"g4f.provider.{cls.__name__} supports: ({param})"

g4f/Provider/__init__.py

@@ -31,6 +31,7 @@ from .Opchatgpts import Opchatgpts
 from .OpenaiChat import OpenaiChat
 from .OpenAssistant import OpenAssistant
 from .PerplexityAi import PerplexityAi
+from .Phind import Phind
 from .Raycast import Raycast
 from .Theb import Theb
 from .Vercel import Vercel
@@ -85,6 +86,7 @@ __all__ = [
     'OpenaiChat',
     'OpenAssistant',
     'PerplexityAi',
+    'Phind',
     'Theb',
     'Vercel',
     'Vitalentum',

g4f/requests.py

@@ -97,8 +97,7 @@ class StreamRequest:
         self.enter.set_result(None)
         self.queue.put_nowait(None)
-        #self.loop.call_soon(self.session.rel, self.curl)
-        return
+        self.loop.call_soon(self.release_curl)

     async def fetch(self) -> StreamResponse:
         if self.handle:
@@ -146,8 +145,22 @@ class StreamRequest:
         return await self.fetch()

     async def __aexit__(self, *args):
-        return
-        #self.session.release_curl(self.curl)
+        self.release_curl()
+
+    def release_curl(self):
+        if is_newer_0_5_10:
+            self.session.release_curl(self.curl)
+            return
+        if not self.curl:
+            return
+        self.curl.clean_after_perform()
+        if is_newer_0_5_9:
+            self.session.acurl.remove_handle(self.curl)
+        elif not self.handle.done() and not self.handle.cancelled():
+            self.session.acurl.set_result(self.curl)
+        self.curl.reset()
+        self.session.push_curl(self.curl)
+        self.curl = None

 class StreamSession(AsyncSession):
     def request(
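
The new release_curl is written to be idempotent: whichever of the stream-done callback or __aexit__ runs first returns the handle to curl_cffi, and the second call exits early because self.curl has already been cleared. A generic sketch of that guard pattern, with illustrative pool/handle names rather than the curl_cffi API:

class HandleOwner:
    """Owns a pooled handle and can safely be asked to release it more than once."""

    def __init__(self, pool, handle):
        self.pool = pool
        self.handle = handle

    def release(self):
        if not self.handle:
            # Already released by the other code path; make this call a no-op.
            return
        self.pool.append(self.handle)  # hand the handle back to the pool
        self.handle = None             # mark it as released

owner = HandleOwner(pool=[], handle=object())
owner.release()
owner.release()  # safe: the second call does nothing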