Mirror of https://github.com/xtekky/gpt4free — commit 01294db699, merging branch 'main' into 'patch-1'.
@ -1,87 +1,82 @@
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
import os, json, uuid, requests
|
||||
|
||||
import requests
|
||||
from Crypto.Cipher import AES
|
||||
|
||||
from ..typing import Any, CreateResult
|
||||
from Crypto.Cipher import AES
|
||||
from ..typing import Any, CreateResult
|
||||
from .base_provider import BaseProvider
|
||||
|
||||
|
||||
class GetGpt(BaseProvider):
    """Provider for chat.getgpt.world.

    Sends an AES-encrypted request 'signature' to the site's streaming chat
    endpoint and yields response text deltas as they arrive.
    """

    url                   = 'https://chat.getgpt.world/'
    supports_stream       = True
    working               = True
    supports_gpt_35_turbo = True

    @staticmethod
    def create_completion(
        model: str,
        messages: list[dict[str, str]],
        stream: bool, **kwargs: Any) -> CreateResult:
        """Yield completion chunks from the getgpt streaming API.

        model    : accepted for interface compatibility; the request below is
                   pinned to 'gpt-3.5-turbo'.
        messages : OpenAI-style [{'role': ..., 'content': ...}] history.
        stream   : accepted for interface compatibility; the HTTP request
                   always streams ('stream': True in the payload).
        kwargs   : optional sampling parameters (temperature, top_p,
                   max_tokens, presence_penalty, frequency_penalty).
        """
        headers = {
            'Content-Type' : 'application/json',
            'Referer'      : 'https://chat.getgpt.world/',
            'user-agent'   : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36',
        }

        data = json.dumps(
            {
                'messages'          : messages,
                'frequency_penalty' : kwargs.get('frequency_penalty', 0),
                'max_tokens'        : kwargs.get('max_tokens', 4000),
                'model'             : 'gpt-3.5-turbo',
                'presence_penalty'  : kwargs.get('presence_penalty', 0),
                'temperature'       : kwargs.get('temperature', 1),
                'top_p'             : kwargs.get('top_p', 1),
                'stream'            : True,
                'uuid'              : str(uuid.uuid4())
            }
        )

        # The JSON payload is not sent directly: the server expects an
        # encrypted 'signature' produced by the module-level _encrypt helper.
        res = requests.post('https://chat.getgpt.world/api/chat/stream',
                            headers=headers, json={'signature': _encrypt(data)}, stream=True)

        res.raise_for_status()
        for line in res.iter_lines():
            # Server-sent-event stream: only 'data: ...' lines that carry a
            # content delta are relevant; keep-alives etc. are skipped.
            if b'content' in line:
                line_json = json.loads(line.decode('utf-8').split('data: ')[1])
                yield line_json['choices'][0]['delta']['content']

    @classmethod
    @property
    def params(cls):
        # NOTE(review): stacking @classmethod on @property is deprecated in
        # Python 3.11+; kept as-is to preserve the existing class interface.
        params = [
            ('model', 'str'),
            ('messages', 'list[dict[str, str]]'),
            ('stream', 'bool'),
            ('temperature', 'float'),
            ('presence_penalty', 'int'),
            ('frequency_penalty', 'int'),
            ('top_p', 'int'),
            ('max_tokens', 'int'),
        ]
        param = ', '.join([': '.join(p) for p in params])
        return f'g4f.provider.{cls.__name__} supports: ({param})'
|
||||
|
||||
|
||||
def _encrypt(e: str):
    """Encrypt *e* for the getgpt 'signature' request field.

    A fresh key and IV are derived per call: 8 random bytes hex-encoded give
    16 ASCII characters each, used as the AES-128 key and the CBC IV.  The
    hex of the ciphertext is returned with the key and IV appended in clear
    text, so the server can recover them from the tail of the string.
    """
    t = os.urandom(8).hex().encode('utf-8')   # 16-char key material
    n = os.urandom(8).hex().encode('utf-8')   # 16-char IV
    r = e.encode('utf-8')

    cipher     = AES.new(t, AES.MODE_CBC, n)
    ciphertext = cipher.encrypt(_pad_data(r))

    return ciphertext.hex() + t.decode('utf-8') + n.decode('utf-8')
|
||||
|
||||
|
||||
def _pad_data(data: bytes) -> bytes:
    """Pad *data* to a whole number of AES blocks (PKCS#7 style).

    Each padding byte equals the number of bytes added; a full extra block
    is appended when the input is already block-aligned.
    """
    block_size   = AES.block_size
    padding_size = block_size - len(data) % block_size
    padding      = bytes([padding_size] * padding_size)

    return data + padding
|
||||
|
@ -1,65 +1,66 @@
|
||||
from .Acytoo import Acytoo
|
||||
from .Aichat import Aichat
|
||||
from .Ails import Ails
|
||||
from .AiService import AiService
|
||||
from .AItianhu import AItianhu
|
||||
from .Bard import Bard
|
||||
from .Acytoo import Acytoo
|
||||
from .Aichat import Aichat
|
||||
from .Ails import Ails
|
||||
from .AiService import AiService
|
||||
from .AItianhu import AItianhu
|
||||
from .Bard import Bard
|
||||
from .Bing import Bing
|
||||
from .ChatgptAi import ChatgptAi
|
||||
from .ChatgptLogin import ChatgptLogin
|
||||
from .DeepAi import DeepAi
|
||||
from .DfeHub import DfeHub
|
||||
from .EasyChat import EasyChat
|
||||
from .Forefront import Forefront
|
||||
from .GetGpt import GetGpt
|
||||
from .H2o import H2o
|
||||
from .Hugchat import Hugchat
|
||||
from .Liaobots import Liaobots
|
||||
from .Lockchat import Lockchat
|
||||
from .Opchatgpts import Opchatgpts
|
||||
from .OpenaiChat import OpenaiChat
|
||||
from .Raycast import Raycast
|
||||
from .Theb import Theb
|
||||
from .Vercel import Vercel
|
||||
from .Wewordle import Wewordle
|
||||
from .You import You
|
||||
from .Yqcloud import Yqcloud
|
||||
from .Equing import Equing
|
||||
from .FastGpt import FastGpt
|
||||
from .V50 import V50
|
||||
from .Wuguokai import Wuguokai
|
||||
|
||||
from .base_provider import BaseProvider
|
||||
from .Bing import Bing
|
||||
from .ChatgptAi import ChatgptAi
|
||||
from .ChatgptLogin import ChatgptLogin
|
||||
from .DeepAi import DeepAi
|
||||
from .DfeHub import DfeHub
|
||||
from .EasyChat import EasyChat
|
||||
from .Forefront import Forefront
|
||||
from .GetGpt import GetGpt
|
||||
from .H2o import H2o
|
||||
from .Hugchat import Hugchat
|
||||
from .Liaobots import Liaobots
|
||||
from .Lockchat import Lockchat
|
||||
from .Opchatgpts import Opchatgpts
|
||||
from .OpenaiChat import OpenaiChat
|
||||
from .Raycast import Raycast
|
||||
from .Theb import Theb
|
||||
from .Vercel import Vercel
|
||||
from .Wewordle import Wewordle
|
||||
from .You import You
|
||||
from .Yqcloud import Yqcloud
|
||||
from .Equing import Equing
|
||||
from .FastGpt import FastGpt
|
||||
from .V50 import V50
|
||||
from .Wuguokai import Wuguokai
|
||||
|
||||
# Public re-export list of the provider package.  The previous revision was
# missing a comma between "Wuguokai" and "V50", silently concatenating them
# into the bogus name 'WuguokaiV50'; the list below separates them properly.
__all__ = [
    'BaseProvider',
    'Acytoo',
    'Aichat',
    'Ails',
    'AiService',
    'AItianhu',
    'Bard',
    'Bing',
    'ChatgptAi',
    'ChatgptLogin',
    'DeepAi',
    'DfeHub',
    'EasyChat',
    'Forefront',
    'GetGpt',
    'H2o',
    'Hugchat',
    'Liaobots',
    'Lockchat',
    'Opchatgpts',
    'Raycast',
    'OpenaiChat',
    'Theb',
    'Vercel',
    'Wewordle',
    'You',
    'Yqcloud',
    'Equing',
    'FastGpt',
    'Wuguokai',
    'V50',
]
|
||||
|
@ -1,45 +1,42 @@
|
||||
from . import models
|
||||
from .Provider import BaseProvider
|
||||
from .typing import Any, CreateResult, Union
|
||||
from . import models
|
||||
from .Provider import BaseProvider
|
||||
from .typing import Any, CreateResult, Union
|
||||
|
||||
logging = False
|
||||
|
||||
|
||||
class ChatCompletion:
    """Front-door API: resolve a model/provider pair and run a completion."""

    @staticmethod
    def create(
        model    : Union[models.Model, str],
        messages : list[dict[str, str]],
        provider : Union[type[BaseProvider], None] = None,
        stream   : bool = False,
        auth     : Union[str, None] = None, **kwargs: Any) -> Union[CreateResult, str]:
        """Run a chat completion through *provider* (or the model's best one).

        model    : a models.Model instance, or its registry name as a string.
        messages : OpenAI-style [{'role': ..., 'content': ...}] history.
        provider : explicit provider class; defaults to model.best_provider.
        stream   : when True, return the provider's generator unconsumed;
                   otherwise join all chunks into a single string.
        auth     : credential forwarded to providers that require one.

        Raises Exception for unknown model names, non-working providers,
        missing credentials, and unsupported streaming.
        """
        if isinstance(model, str):
            try:
                model = models.ModelUtils.convert[model]
            except KeyError:
                raise Exception(f'The model: {model} does not exist')

        # `is None` (identity) rather than `== None`: provider is a class or
        # None, and equality comparison with None is un-idiomatic.
        provider = model.best_provider if provider is None else provider

        if not provider.working:
            raise Exception(f'{provider.__name__} is not working')

        if provider.needs_auth and not auth:
            raise Exception(
                f'ValueError: {provider.__name__} requires authentication (use auth=\'cookie or token or jwt ...\' param)')

        if provider.needs_auth:
            kwargs['auth'] = auth

        if not provider.supports_stream and stream:
            raise Exception(
                f'ValueError: {provider.__name__} does not support "stream" argument')

        if logging:
            print(f'Using {provider.__name__} provider')

        result = provider.create_completion(model.name, messages, stream, **kwargs)
        return result if stream else ''.join(result)
|
||||
|
@ -1,15 +1,14 @@
|
||||
from typing import Any, AsyncGenerator, Generator, NewType, Tuple, TypedDict, Union
|
||||
|
||||
# Distinct alias for a hex-encoded SHA-256 digest carried as a plain string.
SHA256 = NewType('sha_256_hash', str)
# Providers stream completions as synchronous generators of text chunks.
CreateResult = Generator[str, None, None]


__all__ = [
    'Any',
    'AsyncGenerator',
    'Generator',
    'Tuple',
    'TypedDict',
    'SHA256',
    'CreateResult',
]
|
Loading…
Reference in New Issue