Add auto support params method

pull/1274/head
Heiner Lohaus 11 months ago
parent d4c8f3e8d5
commit 702837a33a
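This commit removes the hand-maintained `params` class property that every provider duplicated (a hard-coded list of ("name", "type") pairs joined into a support string) and replaces it with a single implementation on BaseProvider that derives the supported parameters from the signature of the provider's create_async_generator, create_async, or create_completion method. A hedged sketch of that mechanism follows the BaseProvider hunk below.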

@@ -72,20 +72,7 @@ class AItianhu(AsyncGeneratorProvider):
if "detail" not in line:
raise RuntimeError(f"Response: {line}")
content = line["detail"]["choices"][0]["delta"].get("content")
if content:
if content := line["detail"]["choices"][0]["delta"].get(
"content"
):
yield content
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("proxy", "str"),
("temperature", "float"),
("top_p", "int"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -59,14 +59,3 @@ class ChatBase(AsyncGeneratorProvider):
if incorrect_response in response_data:
raise RuntimeError("Incorrect response")
yield stream.decode()
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -57,16 +57,6 @@ class ChatForAi(AsyncGeneratorProvider):
raise RuntimeError(f"Response: {chunk.decode()}")
yield chunk.decode()
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"
def generate_signature(timestamp: int, message: str, id: str):
buffer = f"{timestamp}:{id}:{message}:7YN8z6d6"

@@ -47,16 +47,6 @@ class FreeGpt(AsyncGeneratorProvider):
raise RuntimeError("Rate limit reached")
yield chunk
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"
def generate_signature(timestamp: int, message: str, secret: str = ""):
data = f"{timestamp}:{message}:{secret}"

@@ -71,15 +71,3 @@ class GeekGpt(BaseProvider):
if content:
yield content
@classmethod
@property
def params(cls):
params = [
('model', 'str'),
('messages', 'list[dict[str, str]]'),
('stream', 'bool'),
('temperature', 'float'),
]
param = ', '.join([': '.join(p) for p in params])
return f'g4f.provider.{cls.__name__} supports: ({param})'

@@ -62,21 +62,5 @@ class GptGo(AsyncGeneratorProvider):
line = json.loads(line[len(start):-1])
if line["choices"][0]["finish_reason"] == "stop":
break
content = line["choices"][0]["delta"].get("content"):
if content:
if content := line["choices"][0]["delta"].get("content"):
yield content
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("proxy", "str"),
("temperature", "float"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -97,17 +97,3 @@ class Liaobots(AsyncGeneratorProvider):
async for stream in response.content.iter_any():
if stream:
yield stream.decode()
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("proxy", "str"),
("auth", "str"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -57,15 +57,3 @@ class Opchatgpts(AsyncGeneratorProvider):
yield line["data"]
elif line["type"] == "end":
break
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("proxy", "str"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -56,20 +56,3 @@ class Ylokh(AsyncGeneratorProvider):
else:
chat = await response.json()
yield chat["choices"][0]["message"].get("content")
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("proxy", "str"),
("timeout", "int"),
("temperature", "float"),
("top_p", "float"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -3,6 +3,8 @@ from __future__ import annotations
from asyncio import AbstractEventLoop
from concurrent.futures import ThreadPoolExecutor
from abc import ABC, abstractmethod
from inspect import signature, Parameter
from types import NoneType
from .helper import get_event_loop, get_cookies, format_prompt
from ..typing import CreateResult, AsyncResult, Messages
@@ -56,13 +58,38 @@ class BaseProvider(ABC):
    @classmethod
    @property
    def params(cls) -> str:
        params = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("stream", "bool"),
        ]
        param = ", ".join([": ".join(p) for p in params])
        return f"g4f.provider.{cls.__name__} supports: ({param})"
        if issubclass(cls, AsyncGeneratorProvider):
            sig = signature(cls.create_async_generator)
        elif issubclass(cls, AsyncProvider):
            sig = signature(cls.create_async)
        else:
            sig = signature(cls.create_completion)

        def get_type_name(annotation: type) -> str:
            if hasattr(annotation, "__name__"):
                annotation = annotation.__name__
            elif isinstance(annotation, NoneType):
                annotation = "None"
            return str(annotation)

        args = ""
        for name, param in sig.parameters.items():
            if name in ("self", "kwargs"):
                continue
            if name == "stream" and not cls.supports_stream:
                continue
            if args:
                args += ", "
            args += "\n"
            args += " " + name
            if name != "model" and param.annotation is not Parameter.empty:
                args += f": {get_type_name(param.annotation)}"
            if param.default == "":
                args += ' = ""'
            elif param.default is not Parameter.empty:
                args += f" = {param.default}"

        return f"g4f.Provider.{cls.__name__} supports: ({args}\n)"
class AsyncProvider(BaseProvider):
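For reference, a minimal, self-contained sketch (not part of the commit; DemoProvider and its signature are made up for illustration) of what the new signature-driven `params` produces. Like the implementation above, it skips self/kwargs and hides stream when the provider does not support streaming:

from inspect import signature, Parameter


class DemoProvider:  # hypothetical provider, for illustration only
    supports_stream = True

    @staticmethod
    def create_async_generator(model: str, messages: list, stream: bool = True,
                               proxy: str = None, temperature: float = 1.0, **kwargs):
        ...


def describe(cls) -> str:
    sig = signature(cls.create_async_generator)
    parts = []
    for name, param in sig.parameters.items():
        if name in ("self", "kwargs"):
            continue                      # internal plumbing is not advertised
        if name == "stream" and not cls.supports_stream:
            continue                      # hide stream for non-streaming providers
        entry = name
        if name != "model" and param.annotation is not Parameter.empty:
            entry += f": {getattr(param.annotation, '__name__', param.annotation)}"
        if param.default == "":
            entry += ' = ""'              # keep empty-string defaults readable
        elif param.default is not Parameter.empty:
            entry += f" = {param.default}"
        parts.append(entry)
    return f"g4f.Provider.{cls.__name__} supports: ({', '.join(parts)})"


print(describe(DemoProvider))
# g4f.Provider.DemoProvider supports: (model, messages: list, stream: bool = True,
#                                      proxy: str = None, temperature: float = 1.0)

With the real classes the intended use is simply printing the property, e.g. print(g4f.Provider.ChatBase.params), which now reflects whatever the provider's create method actually accepts instead of a manually synced list. One caveat: stacking @classmethod and @property as done here was deprecated in Python 3.11 and removed in 3.13, so the pattern only works on interpreter versions current at the time of this commit.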

@@ -40,18 +40,6 @@ class Aibn(AsyncGeneratorProvider):
async for chunk in response.iter_content():
yield chunk.decode()
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("temperature", "float"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"
def generate_signature(timestamp: int, message: str, secret: str = "undefined"):
data = f"{timestamp}:{message}:{secret}"

@@ -77,19 +77,6 @@ class Ails(AsyncGeneratorProvider):
yield token
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("temperature", "float"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"
def _hash(json_data: dict[str, str]) -> SHA256:
base_string: str = f'{json_data["t"]}:{json_data["m"]}:WI,2rU#_r:r~aF4aJ36[.Z(/8Rv93Rf:{len(json_data["m"])}'

@@ -70,15 +70,3 @@ class Aivvm(BaseProvider):
yield chunk.decode("utf-8")
except UnicodeDecodeError:
yield chunk.decode("unicode-escape")
@classmethod
@property
def params(cls):
params = [
('model', 'str'),
('messages', 'list[dict[str, str]]'),
('stream', 'bool'),
('temperature', 'float'),
]
param = ', '.join([': '.join(p) for p in params])
return f'g4f.provider.{cls.__name__} supports: ({param})'

@@ -45,14 +45,3 @@ class ChatgptDuo(AsyncProvider):
@classmethod
def get_sources(cls):
return cls._sources
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -48,16 +48,3 @@ class CodeLinkAva(AsyncGeneratorProvider):
line = json.loads(line[6:-1])
if content := line["choices"][0]["delta"].get("content"):
yield content
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("temperature", "float"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -60,18 +60,3 @@ class DfeHub(BaseProvider):
if b"content" in chunk:
data = json.loads(chunk.decode().split("data: ")[1])
yield (data["choices"][0]["delta"]["content"])
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("temperature", "float"),
("presence_penalty", "int"),
("frequency_penalty", "int"),
("top_p", "int"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -88,20 +88,3 @@ class EasyChat(BaseProvider):
if len(splitData) > 1:
yield json.loads(splitData[1])["choices"][0]["delta"]["content"]
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("temperature", "float"),
("presence_penalty", "int"),
("frequency_penalty", "int"),
("top_p", "int"),
("active_server", "int"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -67,14 +67,3 @@ class Equing(BaseProvider):
line_json = json.loads(line.decode('utf-8').split('data: ')[1])
if token := line_json['choices'][0]['delta'].get('content'):
yield token
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -75,14 +75,3 @@ class FastGpt(BaseProvider):
yield token
except:
continue
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -55,22 +55,6 @@ class GetGpt(BaseProvider):
line_json = json.loads(line.decode('utf-8').split('data: ')[1])
yield (line_json['choices'][0]['delta']['content'])
@classmethod
@property
def params(cls):
params = [
('model', 'str'),
('messages', 'list[dict[str, str]]'),
('stream', 'bool'),
('temperature', 'float'),
('presence_penalty', 'int'),
('frequency_penalty', 'int'),
('top_p', 'int'),
('max_tokens', 'int'),
]
param = ', '.join([': '.join(p) for p in params])
return f'g4f.provider.{cls.__name__} supports: ({param})'
def _encrypt(e: str):
t = os.urandom(8).hex().encode('utf-8')

@@ -87,21 +87,3 @@ class H2o(AsyncGeneratorProvider):
proxy=proxy,
) as response:
response.raise_for_status()
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("temperature", "float"),
("truncate", "int"),
("max_new_tokens", "int"),
("do_sample", "bool"),
("repetition_penalty", "float"),
("return_full_text", "bool"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -49,15 +49,3 @@ class Lockchat(BaseProvider):
token = json.loads(token.decode("utf-8").split("data: ")[1])
if token := token["choices"][0]["delta"].get("content"):
yield (token)
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("temperature", "float"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -98,18 +98,6 @@ class Myshell(AsyncGeneratorProvider):
raise RuntimeError(f"Received unexpected message: {data_type}")
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"
def generate_timestamp() -> str:
return str(
int(

@@ -59,16 +59,3 @@ class V50(BaseProvider):
if "https://fk1.v50.ltd" not in response.text:
yield response.text
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("temperature", "float"),
("top_p", "int"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -51,17 +51,3 @@ class Vitalentum(AsyncGeneratorProvider):
line = json.loads(line[6:-1])
if content := line["choices"][0]["delta"].get("content"):
yield content
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool"),
("proxy", "str"),
("temperature", "float"),
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"

@@ -55,14 +55,3 @@ class Wuguokai(BaseProvider):
yield _split[1].strip()
else:
yield _split[0].strip()
@classmethod
@property
def params(cls):
params = [
("model", "str"),
("messages", "list[dict[str, str]]"),
("stream", "bool")
]
param = ", ".join([": ".join(p) for p in params])
return f"g4f.provider.{cls.__name__} supports: ({param})"