2023-09-03 08:26:26 +00:00
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
import uuid
|
2024-01-23 22:48:11 +00:00
|
|
|
from aiohttp import ClientSession, BaseConnector
|
2023-07-28 10:07:17 +00:00
|
|
|
|
2023-10-09 08:22:17 +00:00
|
|
|
from ..typing import AsyncResult, Messages
|
2024-01-23 18:44:48 +00:00
|
|
|
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
|
2024-01-23 23:46:35 +00:00
|
|
|
from .helper import get_connector
|
2024-03-12 01:06:06 +00:00
|
|
|
from ..requests import raise_for_status
|
2023-07-28 10:07:17 +00:00
|
|
|
|
2024-08-31 06:47:39 +00:00
|
|
|
def _entry(model_id: str, name: str, family: str, provider: str,
           max_length: int, token_limit: int, context: str) -> dict:
    """Build one model-info record in the exact shape the liaobots API expects."""
    return {
        "id": model_id,
        "name": name,
        "model": family,
        "provider": provider,
        "maxLength": max_length,
        "tokenLimit": token_limit,
        "context": context,
    }


# Model catalogue, keyed by the internal model id.  Each value is the full
# record that gets sent verbatim as the "model" field of a chat request.
models = {
    spec[0]: _entry(*spec)
    for spec in (
        ("gpt-4o-mini-free",           "GPT-4o-Mini-Free",    "ChatGPT", "OpenAI",    31200,   7800,    "8K"),
        ("gpt-4o-mini",                "GPT-4o-Mini",         "ChatGPT", "OpenAI",    260000,  126000,  "128K"),
        ("gpt-4o-free",                "GPT-4o-free",         "ChatGPT", "OpenAI",    31200,   7800,    "8K"),
        ("gpt-4-turbo-2024-04-09",     "GPT-4-Turbo",         "ChatGPT", "OpenAI",    260000,  126000,  "128K"),
        ("gpt-4o-2024-08-06",          "GPT-4o",              "ChatGPT", "OpenAI",    260000,  126000,  "128K"),
        ("gpt-4-0613",                 "GPT-4-0613",          "ChatGPT", "OpenAI",    32000,   7600,    "8K"),
        ("claude-3-opus-20240229",     "Claude-3-Opus",       "Claude",  "Anthropic", 800000,  200000,  "200K"),
        ("claude-3-opus-20240229-aws", "Claude-3-Opus-Aws",   "Claude",  "Anthropic", 800000,  200000,  "200K"),
        ("claude-3-opus-20240229-gcp", "Claude-3-Opus-Gcp",   "Claude",  "Anthropic", 800000,  200000,  "200K"),
        ("claude-3-sonnet-20240229",   "Claude-3-Sonnet",     "Claude",  "Anthropic", 800000,  200000,  "200K"),
        ("claude-3-5-sonnet-20240620", "Claude-3.5-Sonnet",   "Claude",  "Anthropic", 800000,  200000,  "200K"),
        ("claude-3-haiku-20240307",    "Claude-3-Haiku",      "Claude",  "Anthropic", 800000,  200000,  "200K"),
        ("claude-2.1",                 "Claude-2.1-200k",     "Claude",  "Anthropic", 800000,  200000,  "200K"),
        ("gemini-1.0-pro-latest",      "Gemini-Pro",          "Gemini",  "Google",    120000,  30000,   "32K"),
        ("gemini-1.5-flash-latest",    "Gemini-1.5-Flash-1M", "Gemini",  "Google",    4000000, 1000000, "1024K"),
        ("gemini-1.5-pro-latest",      "Gemini-1.5-Pro-1M",   "Gemini",  "Google",    4000000, 1000000, "1024K"),
    )
}
|
|
|
|
|
|
|
|
|
2024-01-23 18:44:48 +00:00
|
|
|
class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
    """Async streaming provider for liaobots.work.

    Exposes OpenAI, Anthropic and Google models through the liaobots chat
    endpoint.  An auth code is obtained lazily, cached at class level (along
    with the session cookies) and reused across calls.
    """
    url = "https://liaobots.site"
    working = True
    supports_message_history = True
    supports_system_message = True
    supports_gpt_4 = True
    default_model = "gpt-4o"
    models = list(models.keys())

    # NOTE: the previous version of this mapping repeated the keys "gpt-4o",
    # "claude-3-opus" and "gemini-pro"; Python dict literals silently keep
    # only the LAST value for a duplicated key, so only the final entry of
    # each group ever took effect.  The dead duplicates have been removed,
    # preserving the effective mapping exactly.
    model_aliases = {
        "gpt-4o-mini": "gpt-4o-mini-free",
        "gpt-4-turbo": "gpt-4-turbo-2024-04-09",
        "gpt-4o": "gpt-4o-2024-08-06",
        "gpt-4": "gpt-4-0613",

        "claude-3-opus": "claude-3-opus-20240229-gcp",
        "claude-3-sonnet": "claude-3-sonnet-20240229",
        "claude-3-5-sonnet": "claude-3-5-sonnet-20240620",
        "claude-3-haiku": "claude-3-haiku-20240307",
        "claude-2.1": "claude-2.1",

        "gemini-flash": "gemini-1.5-flash-latest",
        "gemini-pro": "gemini-1.5-pro-latest",
    }

    # Class-level cache: auth code and cookie jar are shared by all requests
    # so a successful login is reused until the server rejects it.
    _auth_code = ""
    _cookie_jar = None

    @classmethod
    def get_model(cls, model: str) -> str:
        """Resolve a model name or alias to the internal model identifier.

        Raises:
            ValueError: if the resolved name is not a supported model.
        """
        if model in cls.model_aliases:
            model = cls.model_aliases[model]
        if model not in models:
            raise ValueError(f"Model '{model}' is not supported.")
        return model

    @classmethod
    def is_supported(cls, model: str) -> bool:
        """Return True if *model* is a known model id or a known alias."""
        return model in models or model in cls.model_aliases

    @classmethod
    async def _refresh_auth_code(cls, session: ClientSession, authcode: str) -> None:
        """POST *authcode* to the user endpoint and cache the returned code.

        Stores the fresh auth code and the session's cookie jar on the class.

        Raises:
            RuntimeError: if the server returns an empty auth code.
        """
        async with session.post(
            "https://liaobots.work/api/user",
            json={"authcode": authcode},
            verify_ssl=False
        ) as response:
            await raise_for_status(response)
            cls._auth_code = (await response.json(content_type=None))["authCode"]
            if not cls._auth_code:
                raise RuntimeError("Empty auth code")
            cls._cookie_jar = session.cookie_jar

    @classmethod
    async def _stream_chat(cls, session: ClientSession, data: dict):
        """POST the chat payload and yield decoded response chunks.

        Raises:
            RuntimeError: if the server answers with an HTML challenge page,
                which indicates the cached session/auth code is invalid.
        """
        async with session.post(
            "https://liaobots.work/api/chat",
            json=data,
            headers={"x-auth-code": cls._auth_code},
            verify_ssl=False
        ) as response:
            await raise_for_status(response)
            async for chunk in response.content.iter_any():
                # An HTML payload means we got a captcha/challenge page
                # instead of a model response.
                if b"<html coupert-item=" in chunk:
                    raise RuntimeError("Invalid session")
                if chunk:
                    yield chunk.decode(errors="ignore")

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        auth: str = None,
        proxy: str = None,
        connector: BaseConnector = None,
        **kwargs
    ) -> AsyncResult:
        """Stream a chat completion from liaobots.work.

        On failure of the first attempt (e.g. a stale cached auth code) the
        request is retried once with the well-known fallback auth code.
        """
        model = cls.get_model(model)
        headers = {
            "authority": "liaobots.com",
            "content-type": "application/json",
            "origin": cls.url,
            "referer": f"{cls.url}/",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
        }
        async with ClientSession(
            headers=headers,
            cookie_jar=cls._cookie_jar,
            connector=get_connector(connector, proxy, True)
        ) as session:
            data = {
                "conversationId": str(uuid.uuid4()),
                # The API expects the full model record, not just the id.
                "model": models[model],
                "messages": messages,
                "key": "",
                "prompt": kwargs.get("system_message", "You are a helpful assistant."),
            }
            if not cls._auth_code:
                # Prime the session with the recaptcha login before the
                # first auth-code exchange.
                async with session.post(
                    "https://liaobots.work/recaptcha/api/login",
                    data={"token": "abcdefghijklmnopqrst"},
                    verify_ssl=False
                ) as response:
                    await raise_for_status(response)
            try:
                await cls._refresh_auth_code(session, cls._auth_code)
                async for chunk in cls._stream_chat(session, data):
                    yield chunk
            # FIX: was a bare `except:`, which also caught GeneratorExit
            # (raised when the consumer closes this async generator) and
            # asyncio.CancelledError, then tried to yield again — causing a
            # RuntimeError and swallowing cancellation.  Only ordinary
            # failures should trigger the fallback retry.
            except Exception:
                await cls._refresh_auth_code(session, "pTIQr4FTnVRfr")
                async for chunk in cls._stream_chat(session, data):
                    yield chunk

    @classmethod
    async def initialize_auth_code(cls, session: ClientSession) -> None:
        """
        Initialize the auth code by making the necessary login requests.
        """
        # Delegates to the shared helper; uses the known fallback auth code.
        await cls._refresh_auth_code(session, "pTIQr4FTnVRfr")

    @classmethod
    async def ensure_auth_code(cls, session: ClientSession) -> None:
        """
        Ensure the auth code is initialized, and if not, perform the initialization.
        """
        if not cls._auth_code:
            await cls.initialize_auth_code(session)