from __future__ import annotations

import random
import json

from aiohttp import ClientSession, BaseConnector

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_connector

API_URL = "https://labs-api.perplexity.ai/socket.io/"
WS_URL = "wss://labs-api.perplexity.ai/socket.io/"


class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
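    """Async provider that streams completions from labs.perplexity.ai over Socket.IO."""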
    url = "https://labs.perplexity.ai"
    working = True
    default_model = 'pplx-70b-online'
    models = [
        'pplx-7b-online', 'pplx-70b-online', 'pplx-7b-chat', 'pplx-70b-chat', 'mistral-7b-instruct',
        'codellama-34b-instruct', 'llama-2-70b-chat', 'llava-7b-chat', 'mixtral-8x7b-instruct',
        'mistral-medium', 'related'
    ]
    model_aliases = {
        "mistralai/Mistral-7B-Instruct-v0.1": "mistral-7b-instruct",
        "meta-llama/Llama-2-70b-chat-hf": "llama-2-70b-chat",
        "mistralai/Mixtral-8x7B-Instruct-v0.1": "mixtral-8x7b-instruct",
        "codellama/CodeLlama-34b-Instruct-hf": "codellama-34b-instruct"
    }

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        connector: BaseConnector = None,
        **kwargs
    ) -> AsyncResult:
        headers = {
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:121.0) Gecko/20100101 Firefox/121.0",
            "Accept": "*/*",
            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
            "Accept-Encoding": "gzip, deflate, br",
            "Origin": cls.url,
            "Connection": "keep-alive",
            "Referer": f"{cls.url}/",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-site",
            "TE": "trailers",
        }
        async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
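            # Engine.IO handshake: an initial HTTP long-polling request (with a
            # random cache-busting token) returns the session id used below.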
            t = format(random.getrandbits(32), '08x')
            async with session.get(
                f"{API_URL}?EIO=4&transport=polling&t={t}"
            ) as response:
                text = await response.text()
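            # The handshake payload is prefixed with a packet-type digit,
            # so skip the first character before parsing the JSON body.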
            sid = json.loads(text[1:])['sid']
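            # Socket.IO "40" connect packet: join the default namespace as an
            # anonymous user before upgrading to the websocket transport.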
            post_data = '40{"jwt":"anonymous-ask-user"}'
            async with session.post(
                f'{API_URL}?EIO=4&transport=polling&t={t}&sid={sid}',
                data=post_data
            ) as response:
                assert await response.text() == 'OK'
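            # Switch to the websocket transport and run the Engine.IO upgrade
            # probe ('2probe' -> '3probe', then '5' to confirm the upgrade).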
            async with session.ws_connect(f'{WS_URL}?EIO=4&transport=websocket&sid={sid}', autoping=False) as ws:
                await ws.send_str('2probe')
                assert await ws.receive_str() == '3probe'
                await ws.send_str('5')
                assert await ws.receive_str()
                assert await ws.receive_str() == '6'
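                # Event payload for the 'perplexity_labs' channel; get_model()
                # resolves aliases such as HuggingFace-style model names.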
                message_data = {
                    'version': '2.2',
                    'source': 'default',
                    'model': cls.get_model(model),
                    'messages': messages
                }
                await ws.send_str('42' + json.dumps(['perplexity_labs', message_data]))
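                # Stream the reply: answer '2' pings with '3' pongs, and for
                # every data frame yield only the portion of the cumulative
                # output that has not been emitted yet.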
                last_message = 0
                while True:
                    message = await ws.receive_str()
                    if message == '2':
                        await ws.send_str('3')
                        continue
                    try:
                        data = json.loads(message[2:])[1]
                        yield data["output"][last_message:]
                        last_message = len(data["output"])
                        if data["final"]:
                            break
                    except Exception as e:
                        raise RuntimeError(f"Message: {message}") from e
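

if __name__ == "__main__":
    # Minimal usage sketch, assuming this module is executed with `python -m`
    # from inside its provider package so the relative imports above resolve;
    # it is not part of the provider itself.
    import asyncio

    async def _demo():
        demo_messages = [{"role": "user", "content": "Hello"}]
        async for chunk in PerplexityLabs.create_async_generator("pplx-70b-online", demo_messages):
            print(chunk, end="", flush=True)

    asyncio.run(_demo())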