Mirror of https://github.com/xtekky/gpt4free.git (synced 2024-11-19 03:25:32 +00:00)
Add Phind Provider
Add release_curl to the curl_cffi-based requests wrapper. Support create-image responses in Bing.
Parent: a4da030d67
Commit: ff4d9ae584
g4f/Provider/Bing.py
@@ -3,6 +3,7 @@ from __future__ import annotations
import random
import json
import os
import urllib.parse
from aiohttp import ClientSession, ClientTimeout
from ..typing import AsyncGenerator
from .base_provider import AsyncGeneratorProvider, get_cookies
@@ -245,7 +246,6 @@ async def stream_generate(
        await wss.send_str(create_message(conversation, prompt, context))

        response_txt = ''
        result_text = ''
        returned_text = ''
        final = False
@@ -260,14 +260,18 @@ async def stream_generate(
                if response.get('type') == 1 and response['arguments'][0].get('messages'):
                    message = response['arguments'][0]['messages'][0]
                    if (message['contentOrigin'] != 'Apology'):
                        response_txt = result_text + \
                            message['adaptiveCards'][0]['body'][0].get('text', '')
                        if 'adaptiveCards' in message:
                            card = message['adaptiveCards'][0]['body'][0]
                            if "text" in card:
                                response_txt = card.get('text')
                            if message.get('messageType'):
                                inline_txt = message['adaptiveCards'][0]['body'][0]['inlines'][0].get('text')
                                inline_txt = card['inlines'][0].get('text')
                                response_txt += inline_txt + '\n'
                                result_text += inline_txt + '\n'
                        elif message.get('contentType') == "IMAGE":
                            query = urllib.parse.quote(message.get('text'))
                            url = f"\nhttps://www.bing.com/images/create?q={query}"
                            response_txt += url
                            final = True
                if response_txt.startswith(returned_text):
                    new = response_txt[len(returned_text):]
                    if new != "\n":
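The IMAGE branch is what the newly added urllib.parse import is for: when Bing replies with a message whose contentType is "IMAGE", the prompt text is URL-encoded and a link to Bing's image-creation page is appended to the streamed answer. A minimal standalone sketch of that URL construction (the helper name is illustrative, not part of the provider):

import urllib.parse

def image_create_url(prompt_text: str) -> str:
    # URL-encode the prompt and point at Bing's image-creation page,
    # mirroring the f-string used in the hunk above.
    query = urllib.parse.quote(prompt_text)
    return f"\nhttps://www.bing.com/images/create?q={query}"

# image_create_url("a watercolor fox") ->
# "\nhttps://www.bing.com/images/create?q=a%20watercolor%20fox"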
g4f/Provider/Phind.py (new file, 76 lines)
@@ -0,0 +1,76 @@
from __future__ import annotations

import random
from datetime import datetime

from ..typing import AsyncGenerator
from ..requests import StreamSession
from .base_provider import AsyncGeneratorProvider, format_prompt


class Phind(AsyncGeneratorProvider):
    url = "https://www.phind.com"
    working = True
    supports_gpt_4 = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: list[dict[str, str]],
        proxy: str = None,
        **kwargs
    ) -> AsyncGenerator:
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789'
        user_id = ''.join(random.choice(chars) for _ in range(24))
        data = {
            "question": format_prompt(messages),
            "webResults": [],
            "options": {
                "date": datetime.now().strftime("%d.%m.%Y"),
                "language": "en",
                "detailed": True,
                "anonUserId": user_id,
                "answerModel": "GPT-4",
                "creativeMode": False,
                "customLinks": []
            },
            "context": ""
        }
        headers = {
            "Authority": cls.url,
            "Accept": "application/json, text/plain, */*",
            "Origin": cls.url,
            "Referer": f"{cls.url}/"
        }
        async with StreamSession(headers=headers, timeout=(5, 180), proxies={"https": proxy}, impersonate="chrome107") as session:
            async with session.post(f"{cls.url}/api/infer/answer", json=data) as response:
                response.raise_for_status()
                new_lines = 0
                async for line in response.iter_lines():
                    if not line:
                        continue
                    if line.startswith(b"data: "):
                        line = line[6:]
                        if line.startswith(b"<PHIND_METADATA>"):
                            continue
                        if line:
                            if new_lines:
                                yield "".join(["\n" for _ in range(int(new_lines / 2))])
                                new_lines = 0
                            yield line.decode()
                        else:
                            new_lines += 1


    @classmethod
    @property
    def params(cls):
        params = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("stream", "bool"),
            ("proxy", "str"),
        ]
        param = ", ".join([": ".join(p) for p in params])
        return f"g4f.provider.{cls.__name__} supports: ({param})"
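A hedged usage sketch for the new provider: create_async_generator is an async generator classmethod, so it can be consumed directly with async for. Only the Phind class itself comes from the file above; the driver script, model string, and message content are illustrative assumptions.

import asyncio
from g4f.Provider.Phind import Phind

async def main() -> None:
    messages = [{"role": "user", "content": "Summarize asyncio in one sentence."}]
    # Chunks are yielded as they stream back from phind.com.
    async for chunk in Phind.create_async_generator(model="gpt-4", messages=messages):
        print(chunk, end="", flush=True)

asyncio.run(main())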
g4f/Provider/__init__.py
@@ -31,6 +31,7 @@ from .Opchatgpts import Opchatgpts
from .OpenaiChat import OpenaiChat
from .OpenAssistant import OpenAssistant
from .PerplexityAi import PerplexityAi
from .Phind import Phind
from .Raycast import Raycast
from .Theb import Theb
from .Vercel import Vercel

@@ -85,6 +86,7 @@ __all__ = [
    'OpenaiChat',
    'OpenAssistant',
    'PerplexityAi',
    'Phind',
    'Theb',
    'Vercel',
    'Vitalentum',
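With Phind imported and exported in g4f.Provider.__init__, the provider can also be selected through g4f's high-level entry point. A rough sketch, assuming the usual ChatCompletion.create interface with a provider argument (model string and prompt are placeholders):

import g4f

response = g4f.ChatCompletion.create(
    model="gpt-4",
    provider=g4f.Provider.Phind,
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)
# With stream=True the call yields text chunks as they arrive.
for chunk in response:
    print(chunk, end="")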
g4f/requests.py
@@ -97,8 +97,7 @@ class StreamRequest:
            self.enter.set_result(None)
            self.queue.put_nowait(None)

            #self.loop.call_soon(self.session.rel, self.curl)
            return
        self.loop.call_soon(self.release_curl)

    async def fetch(self) -> StreamResponse:
        if self.handle:

@@ -146,8 +145,22 @@ class StreamRequest:
        return await self.fetch()

    async def __aexit__(self, *args):
        self.release_curl()

    def release_curl(self):
        if is_newer_0_5_10:
            self.session.release_curl(self.curl)
            return
        #self.session.release_curl(self.curl)
        if not self.curl:
            return
        self.curl.clean_after_perform()
        if is_newer_0_5_9:
            self.session.acurl.remove_handle(self.curl)
        elif not self.handle.done() and not self.handle.cancelled():
            self.session.acurl.set_result(self.curl)
        self.curl.reset()
        self.session.push_curl(self.curl)
        self.curl = None


class StreamSession(AsyncSession):
    def request(
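The two version flags gate how the curl handle is returned to the session: recent curl_cffi releases expose session.release_curl, while older ones require the handle to be cleaned, detached from the async multi handle, reset, and pushed back manually. Below is a minimal sketch of how such flags could be derived from the installed curl_cffi version; this is an assumption for illustration, not the module's actual definition.

import re
from curl_cffi import __version__ as curl_cffi_version

def _version_tuple(version: str) -> tuple[int, ...]:
    # Keep only the leading digits of each component so pre-release tags
    # such as "0.5.10b1" still parse (assumed helper, not g4f code).
    parts = version.split(".")[:3]
    matches = (re.match(r"\d+", part) for part in parts)
    return tuple(int(m.group()) if m else 0 for m in matches)

# Assumed semantics: each flag is set when the installed version is at least the named release.
is_newer_0_5_9 = _version_tuple(curl_cffi_version) >= (0, 5, 9)
is_newer_0_5_10 = _version_tuple(curl_cffi_version) >= (0, 5, 10)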