mirror of https://github.com/xtekky/gpt4free
Merge branch 'main' into patch-1
commit
01294db699
@ -1,87 +1,82 @@
|
|||||||
import json
|
import os, json, uuid, requests
|
||||||
import os
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import requests
|
from Crypto.Cipher import AES
|
||||||
from Crypto.Cipher import AES
|
from ..typing import Any, CreateResult
|
||||||
|
|
||||||
from ..typing import Any, CreateResult
|
|
||||||
from .base_provider import BaseProvider
|
from .base_provider import BaseProvider
|
||||||
|
|
||||||
|
|
||||||
class GetGpt(BaseProvider):
    """g4f provider backed by chat.getgpt.world (streaming gpt-3.5-turbo)."""

    url = 'https://chat.getgpt.world/'
    supports_stream = True
    working = True
    supports_gpt_35_turbo = True

    @staticmethod
    def create_completion(
            model: str,
            messages: list[dict[str, str]],
            stream: bool, **kwargs: Any) -> CreateResult:
        """Yield completion text chunks from the service.

        The OpenAI-style payload is JSON-serialized, encrypted by the
        module-level ``_encrypt`` helper and posted as a ``signature``
        field; each SSE line containing ``content`` is parsed and its
        delta text yielded.
        """
        request_headers = {
            'Content-Type' : 'application/json',
            'Referer'      : 'https://chat.getgpt.world/',
            'user-agent'   : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36',
        }

        payload = {
            'messages'          : messages,
            'frequency_penalty' : kwargs.get('frequency_penalty', 0),
            'max_tokens'        : kwargs.get('max_tokens', 4000),
            'model'             : 'gpt-3.5-turbo',
            'presence_penalty'  : kwargs.get('presence_penalty', 0),
            'temperature'       : kwargs.get('temperature', 1),
            'top_p'             : kwargs.get('top_p', 1),
            'stream'            : True,
            'uuid'              : str(uuid.uuid4())
        }

        response = requests.post(
            'https://chat.getgpt.world/api/chat/stream',
            headers=request_headers,
            json={'signature': _encrypt(json.dumps(payload))},
            stream=True)

        response.raise_for_status()
        for raw_line in response.iter_lines():
            # Only SSE data lines carrying a content delta are of interest.
            if b'content' not in raw_line:
                continue
            event = json.loads(raw_line.decode('utf-8').split('data: ')[1])
            yield event['choices'][0]['delta']['content']

    @classmethod
    @property
    def params(cls):
        """Human-readable summary of the arguments this provider accepts."""
        supported = [
            ('model', 'str'),
            ('messages', 'list[dict[str, str]]'),
            ('stream', 'bool'),
            ('temperature', 'float'),
            ('presence_penalty', 'int'),
            ('frequency_penalty', 'int'),
            ('top_p', 'int'),
            ('max_tokens', 'int'),
        ]
        param = ', '.join([': '.join(p) for p in supported])
        return f'g4f.provider.{cls.__name__} supports: ({param})'
||||||
|
|
||||||
|
|
||||||
def _encrypt(e: str):
    """Encrypt *e* with AES-CBC under a fresh random key/IV.

    Returns the hex ciphertext with the ASCII-hex key and IV appended,
    which is the wire format the getgpt backend expects to receive.
    """
    key = os.urandom(8).hex().encode('utf-8')  # 16 ASCII-hex bytes -> AES-128 key
    iv  = os.urandom(8).hex().encode('utf-8')  # 16 ASCII-hex bytes -> CBC IV
    plaintext = e.encode('utf-8')

    cipher = AES.new(key, AES.MODE_CBC, iv)
    encrypted = cipher.encrypt(_pad_data(plaintext))
    return encrypted.hex() + key.decode('utf-8') + iv.decode('utf-8')
|
||||||
def _pad_data(data: bytes) -> bytes:
    """Apply PKCS#7 padding so *data* is a whole number of AES blocks."""
    pad_len = AES.block_size - len(data) % AES.block_size
    # PKCS#7: append pad_len copies of the byte value pad_len.
    return data + bytes([pad_len]) * pad_len
|
@ -1,65 +1,66 @@
|
|||||||
from .Acytoo import Acytoo
|
from .Acytoo import Acytoo
|
||||||
from .Aichat import Aichat
|
from .Aichat import Aichat
|
||||||
from .Ails import Ails
|
from .Ails import Ails
|
||||||
from .AiService import AiService
|
from .AiService import AiService
|
||||||
from .AItianhu import AItianhu
|
from .AItianhu import AItianhu
|
||||||
from .Bard import Bard
|
from .Bard import Bard
|
||||||
|
from .Bing import Bing
|
||||||
|
from .ChatgptAi import ChatgptAi
|
||||||
|
from .ChatgptLogin import ChatgptLogin
|
||||||
|
from .DeepAi import DeepAi
|
||||||
|
from .DfeHub import DfeHub
|
||||||
|
from .EasyChat import EasyChat
|
||||||
|
from .Forefront import Forefront
|
||||||
|
from .GetGpt import GetGpt
|
||||||
|
from .H2o import H2o
|
||||||
|
from .Hugchat import Hugchat
|
||||||
|
from .Liaobots import Liaobots
|
||||||
|
from .Lockchat import Lockchat
|
||||||
|
from .Opchatgpts import Opchatgpts
|
||||||
|
from .OpenaiChat import OpenaiChat
|
||||||
|
from .Raycast import Raycast
|
||||||
|
from .Theb import Theb
|
||||||
|
from .Vercel import Vercel
|
||||||
|
from .Wewordle import Wewordle
|
||||||
|
from .You import You
|
||||||
|
from .Yqcloud import Yqcloud
|
||||||
|
from .Equing import Equing
|
||||||
|
from .FastGpt import FastGpt
|
||||||
|
from .V50 import V50
|
||||||
|
from .Wuguokai import Wuguokai
|
||||||
|
|
||||||
from .base_provider import BaseProvider
|
from .base_provider import BaseProvider
|
||||||
from .Bing import Bing
|
|
||||||
from .ChatgptAi import ChatgptAi
|
|
||||||
from .ChatgptLogin import ChatgptLogin
|
|
||||||
from .DeepAi import DeepAi
|
|
||||||
from .DfeHub import DfeHub
|
|
||||||
from .EasyChat import EasyChat
|
|
||||||
from .Forefront import Forefront
|
|
||||||
from .GetGpt import GetGpt
|
|
||||||
from .H2o import H2o
|
|
||||||
from .Hugchat import Hugchat
|
|
||||||
from .Liaobots import Liaobots
|
|
||||||
from .Lockchat import Lockchat
|
|
||||||
from .Opchatgpts import Opchatgpts
|
|
||||||
from .OpenaiChat import OpenaiChat
|
|
||||||
from .Raycast import Raycast
|
|
||||||
from .Theb import Theb
|
|
||||||
from .Vercel import Vercel
|
|
||||||
from .Wewordle import Wewordle
|
|
||||||
from .You import You
|
|
||||||
from .Yqcloud import Yqcloud
|
|
||||||
from .Equing import Equing
|
|
||||||
from .FastGpt import FastGpt
|
|
||||||
from .V50 import V50
|
|
||||||
from .Wuguokai import Wuguokai
|
|
||||||
|
|
||||||
# Public provider surface re-exported by ``from g4f.Provider import *``.
# NOTE: the comma after every entry matters — two adjacent string literals
# without one silently concatenate (a previous revision shipped the bogus
# entry 'WuguokaiV50' because the comma between them was missing).
__all__ = [
    'BaseProvider',
    'Acytoo',
    'Aichat',
    'Ails',
    'AiService',
    'AItianhu',
    'Bard',
    'Bing',
    'ChatgptAi',
    'ChatgptLogin',
    'DeepAi',
    'DfeHub',
    'EasyChat',
    'Forefront',
    'GetGpt',
    'H2o',
    'Hugchat',
    'Liaobots',
    'Lockchat',
    'Opchatgpts',
    'Raycast',
    'OpenaiChat',
    'Theb',
    'Vercel',
    'Wewordle',
    'You',
    'Yqcloud',
    'Equing',
    'FastGpt',
    'Wuguokai',
    'V50',
]
|
||||||
|
@ -1,45 +1,42 @@
|
|||||||
from . import models
|
from . import models
|
||||||
from .Provider import BaseProvider
|
from .Provider import BaseProvider
|
||||||
from .typing import Any, CreateResult, Union
|
from .typing import Any, CreateResult, Union
|
||||||
|
|
||||||
logging = False
|
logging = False
|
||||||
|
|
||||||
|
|
||||||
class ChatCompletion:
    """Entry point that validates a request and routes it to a provider."""

    @staticmethod
    def create(
            model    : Union[models.Model, str],
            messages : list[dict[str, str]],
            provider : Union[type[BaseProvider], None] = None,
            stream   : bool = False,
            auth     : Union[str, None] = None, **kwargs: Any) -> Union[CreateResult, str]:
        """Create a chat completion.

        Resolves a string model name via ``models.ModelUtils.convert``,
        picks ``model.best_provider`` when *provider* is omitted, checks
        the provider's ``working``/``needs_auth``/``supports_stream``
        flags, and returns either a generator of chunks (``stream=True``)
        or the joined full text.

        Raises:
            Exception: on unknown model, dead provider, missing auth, or
                an unsupported ``stream`` request.
        """
        if isinstance(model, str):
            try:
                model = models.ModelUtils.convert[model]
            except KeyError:
                raise Exception(f'The model: {model} does not exist')

        # Fall back to the model's preferred provider when none was given.
        # (Fixed: compare to the None singleton with `is`, not `==`.)
        provider = model.best_provider if provider is None else provider

        if not provider.working:
            raise Exception(f'{provider.__name__} is not working')

        if provider.needs_auth and not auth:
            raise Exception(
                f'ValueError: {provider.__name__} requires authentication (use auth=\'cookie or token or jwt ...\' param)')

        if provider.needs_auth:
            kwargs['auth'] = auth

        if not provider.supports_stream and stream:
            raise Exception(
                f'ValueError: {provider.__name__} does not support "stream" argument')

        if logging:
            print(f'Using {provider.__name__} provider')

        result = provider.create_completion(model.name, messages, stream, **kwargs)
        # A streaming caller gets the raw generator; otherwise join chunks.
        return result if stream else ''.join(result)
||||||
|
@ -1,15 +1,14 @@
|
|||||||
from typing import Any, AsyncGenerator, Generator, NewType, Tuple, TypedDict, Union

# Distinct alias for strings carrying a SHA-256 hash value.
SHA256 = NewType('sha_256_hash', str)
# A provider completion is a plain generator of text chunks.
CreateResult = Generator[str, None, None]

# Names re-exported by ``from g4f.typing import *``.
__all__ = [
    'Any',
    'AsyncGenerator',
    'Generator',
    'Tuple',
    'TypedDict',
    'SHA256',
    'CreateResult',
]
|
Loading…
Reference in New Issue