mirror of https://github.com/xtekky/gpt4free.git (synced 2024-11-01 03:21:02 +00:00)
Added new Provider Chatxyz (#1393)
* Added functionality for the Provider
* Added the Provider in __init__
* System message, if present, must be the first object in the message array
This commit is contained in:
parent
1ddd01f1ad
commit
eb1e91d123
g4f/Provider/Chatxyz.py: 68 additions (new file)
@@ -0,0 +1,68 @@
from __future__ import annotations

import json

from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider


class Chatxyz(AsyncGeneratorProvider):
    url = "https://chat.3211000.xyz"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        if not model:
            model = "gpt-3.5-turbo"
        headers = {
            'Accept': 'text/event-stream',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'en-US,en;q=0.5',
            'Alt-Used': 'chat.3211000.xyz',
            'Content-Type': 'application/json',
            'Host': 'chat.3211000.xyz',
            'Origin': 'https://chat.3211000.xyz',
            'Referer': 'https://chat.3211000.xyz/',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-origin',
            'TE': 'trailers',
            'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:121.0) Gecko/20100101 Firefox/121.0',
            'x-requested-with': 'XMLHttpRequest'
        }
        async with ClientSession(headers=headers) as session:
            # The upstream API expects the system message (if any) to be the
            # first object in the message array, so merge all system messages
            # into a single leading entry.
            system_message = ""
            user_messages = []
            for message in messages:
                if message["role"] == "system":
                    system_message += f'{message["content"]}\n'
                else:
                    user_messages.append(message)
            new_message = [{'role': 'system', 'content': system_message}]
            new_message.extend(user_messages)

            data = {
                "messages": new_message,
                "stream": True,
                "model": "gpt-3.5-turbo",
                "temperature": 0.5,
                "presence_penalty": 0,
                "frequency_penalty": 0,
                "top_p": 1
            }
            async with session.post(
                f'{cls.url}/api/openai/v1/chat/completions',
                json=data,
                proxy=proxy  # forward the optional proxy instead of ignoring it
            ) as response:
                response.raise_for_status()
                # The endpoint streams OpenAI-style server-sent events.
                async for chunk in response.content:
                    line = chunk.decode()
                    if line.startswith("data: [DONE]"):
                        break
                    elif line.startswith("data: "):
                        event = json.loads(line[6:])
                        content = event["choices"][0]["delta"].get("content")
                        if content is not None:
                            yield content
g4f/Provider/__init__.py
@@ -25,6 +25,7 @@ from .ChatgptFree import ChatgptFree
from .ChatgptLogin import ChatgptLogin
from .ChatgptNext import ChatgptNext
from .ChatgptX import ChatgptX
from .Chatxyz import Chatxyz
from .DeepInfra import DeepInfra
from .FakeGpt import FakeGpt
from .FreeGpt import FreeGpt
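With the import added to the provider package, the class can also be selected through the project's top-level interface. A sketch, assuming the g4f.ChatCompletion.create API as it existed around the time of this commit; it is not part of the diff.

import g4f
from g4f.Provider import Chatxyz

# Request a streamed completion pinned to the newly registered provider.
response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    provider=Chatxyz,
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)
for token in response:
    print(token, end="", flush=True)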