Mirror of https://github.com/xtekky/gpt4free.git (synced 2024-11-05 00:01:00 +00:00)
Added OpenChat's Aura Model as a new Provider (#1371)
* Added File for new Provider: Aura
* Added Aura Provider in init
Parent: a5e6ab798f
Commit: bcd1cdf4e8
g4f/Provider/Aura.py (new file, 56 lines added)
@@ -0,0 +1,56 @@
from __future__ import annotations

from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider


class Aura(AsyncGeneratorProvider):
    url = "https://openchat.team"
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        headers = {
            "Accept": "*/*",
            "Accept-Encoding": "gzip, deflate, br",
            "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
            "Content-Type": "application/json",
            "Origin": f"{cls.url}",
            "Referer": f"{cls.url}/",
            "Sec-Ch-Ua": '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
            "Sec-Ch-Ua-Mobile": "?0",
            "Sec-Ch-Ua-Platform": '"Linux"',
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
        }
        async with ClientSession(headers=headers) as session:
            # Collect all system messages into a single prompt string;
            # every other message is forwarded as conversation history.
            system_prompt = ""
            system_message = []
            for message in messages:
                if message["role"] == "system":
                    system_prompt += message["content"]
                else:
                    system_message.append(message)

            # openchat.team expects the model description, chat history,
            # system prompt and temperature in one JSON payload.
            data = {
                "model": {
                    "id": "openchat_v3.2_mistral",
                    "name": "OpenChat Aura",
                    "maxLength": 24576,
                    "tokenLimit": 8192
                },
                "messages": system_message,
                "key": "",
                "prompt": f"{system_prompt}",
                "temperature": 0.5
            }
            async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
                yield await response.text()
|
g4f/Provider/__init__.py
@@ -6,7 +6,7 @@ from .deprecated import *
from .needs_auth import *
from .unfinished import *
from .selenium import *

from .Aura import Aura
from .AiAsk import AiAsk
from .Aichat import Aichat
from .AiChatOnline import AiChatOnline
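For reference, a minimal usage sketch (not part of the commit) that calls the new provider's create_async_generator directly, as defined in the diff above. It assumes the package with this commit is installed and that openchat.team is reachable; the model argument is left empty because Aura hard-codes "openchat_v3.2_mistral" in the request payload.

import asyncio

from g4f.Provider import Aura

async def main():
    # Aura yields the raw response text from https://openchat.team/api/chat.
    async for chunk in Aura.create_async_generator(
        model="",  # ignored; the provider pins openchat_v3.2_mistral
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Say hello."},
        ],
    ):
        print(chunk)

asyncio.run(main())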