Add GigaChat models

vlad 2024-03-14 01:29:49 +03:00
parent 9c381f2906
commit f07173de4f
4 changed files with 157 additions and 2 deletions

g4f/Provider/GigaChat.py (new file)

@@ -0,0 +1,97 @@
from __future__ import annotations

import base64
import os
import ssl
import time
import uuid
import json
from aiohttp import ClientSession, BaseConnector, TCPConnector

from g4f.requests import raise_for_status
from ..typing import AsyncResult, Messages, ImageType
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..image import to_bytes, is_accepted_format
from ..errors import MissingAuthError
from .helper import get_connector

# Module-level token cache shared across requests.
access_token = ''
token_expires_at = 0

# TLS context that trusts the Russian Trusted Root CA bundled with this provider,
# used by the Sber API endpoints below.
ssl_ctx = ssl.create_default_context(
    cafile=os.path.dirname(__file__) + '/gigachat_crt/russian_trusted_root_ca_pem.crt')


class GigaChat(AsyncGeneratorProvider, ProviderModelMixin):
    url = "https://developers.sber.ru/gigachat"
    working = True
    supports_message_history = True
    supports_system_message = True
    supports_stream = True
    needs_auth = True
    default_model = "GigaChat:latest"
    models = ["GigaChat:latest", "GigaChat-Plus", "GigaChat-Pro"]

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        stream: bool = True,
        proxy: str = None,
        api_key: str = None,
        scope: str = "GIGACHAT_API_PERS",
        update_interval: float = 0,
        **kwargs
    ) -> AsyncResult:
        global access_token, token_expires_at
        model = cls.get_model(model)

        if not api_key:
            raise MissingAuthError('Missing "api_key"')

        connector = TCPConnector(ssl_context=ssl_ctx)
        async with ClientSession(connector=get_connector(connector, proxy)) as session:
            # Refresh the cached OAuth token if it expires within 60 seconds;
            # expires_at is compared in milliseconds, matching time.time() * 1000.
            if token_expires_at - int(time.time() * 1000) < 60000:
                async with session.post(url="https://ngw.devices.sberbank.ru:9443/api/v2/oauth",
                                        headers={"Authorization": f"Bearer {api_key}",
                                                 "RqUID": str(uuid.uuid4()),
                                                 "Content-Type": "application/x-www-form-urlencoded"},
                                        data={"scope": scope}) as response:
                    await raise_for_status(response)
                    data = await response.json()
                    access_token = data['access_token']
                    token_expires_at = data['expires_at']

            async with session.post(url="https://gigachat.devices.sberbank.ru/api/v1/chat/completions",
                                    headers={"Authorization": f"Bearer {access_token}"},
                                    json={
                                        "model": model,
                                        "messages": messages,
                                        "stream": stream,
                                        "update_interval": update_interval,
                                        **kwargs
                                    }) as response:
                await raise_for_status(response)

                async for line in response.content:
                    # Non-streaming responses arrive as a single JSON body.
                    if not stream:
                        yield json.loads(line.decode("utf-8"))['choices'][0]['message']['content']
                        return

                    # Streaming responses are server-sent events: b"data: {...}\n".
                    if line and line.startswith(b"data:"):
                        line = line[6:-1]  # remove "data: " prefix and "\n" suffix
                        if line.strip() == b"[DONE]":
                            return
                        else:
                            msg = json.loads(line.decode("utf-8"))['choices'][0]
                            content = msg['delta']['content']
                            if content:
                                yield content
                            if 'finish_reason' in msg:
                                return
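For reference, a minimal sketch of how this provider could be driven directly; the import path follows the file added above, while the placeholder credential is an assumption (the real authorization key comes from the Sber developer portal):

import asyncio
from g4f.Provider.GigaChat import GigaChat

async def main():
    # Streams completion chunks via the OAuth + chat/completions flow above.
    async for chunk in GigaChat.create_async_generator(
        model="GigaChat:latest",
        messages=[{"role": "user", "content": "Hello, GigaChat!"}],
        api_key="<GIGACHAT_AUTHORIZATION_KEY>",  # hypothetical placeholder
    ):
        print(chunk, end="", flush=True)

asyncio.run(main())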

g4f/Provider/__init__.py

@@ -24,6 +24,7 @@ from .DeepInfra import DeepInfra
from .FlowGpt import FlowGpt
from .FreeChatgpt import FreeChatgpt
from .FreeGpt import FreeGpt
from .GigaChat import GigaChat
from .GeminiPro import GeminiPro
from .GeminiProChat import GeminiProChat
from .GptTalkRu import GptTalkRu

g4f/Provider/gigachat_crt/russian_trusted_root_ca_pem.crt (new file)

@@ -0,0 +1,33 @@
-----BEGIN CERTIFICATE-----
MIIFwjCCA6qgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwcDELMAkGA1UEBhMCUlUx
PzA9BgNVBAoMNlRoZSBNaW5pc3RyeSBvZiBEaWdpdGFsIERldmVsb3BtZW50IGFu
ZCBDb21tdW5pY2F0aW9uczEgMB4GA1UEAwwXUnVzc2lhbiBUcnVzdGVkIFJvb3Qg
Q0EwHhcNMjIwMzAxMjEwNDE1WhcNMzIwMjI3MjEwNDE1WjBwMQswCQYDVQQGEwJS
VTE/MD0GA1UECgw2VGhlIE1pbmlzdHJ5IG9mIERpZ2l0YWwgRGV2ZWxvcG1lbnQg
YW5kIENvbW11bmljYXRpb25zMSAwHgYDVQQDDBdSdXNzaWFuIFRydXN0ZWQgUm9v
dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMfFOZ8pUAL3+r2n
qqE0Zp52selXsKGFYoG0GM5bwz1bSFtCt+AZQMhkWQheI3poZAToYJu69pHLKS6Q
XBiwBC1cvzYmUYKMYZC7jE5YhEU2bSL0mX7NaMxMDmH2/NwuOVRj8OImVa5s1F4U
zn4Kv3PFlDBjjSjXKVY9kmjUBsXQrIHeaqmUIsPIlNWUnimXS0I0abExqkbdrXbX
YwCOXhOO2pDUx3ckmJlCMUGacUTnylyQW2VsJIyIGA8V0xzdaeUXg0VZ6ZmNUr5Y
Ber/EAOLPb8NYpsAhJe2mXjMB/J9HNsoFMBFJ0lLOT/+dQvjbdRZoOT8eqJpWnVD
U+QL/qEZnz57N88OWM3rabJkRNdU/Z7x5SFIM9FrqtN8xewsiBWBI0K6XFuOBOTD
4V08o4TzJ8+Ccq5XlCUW2L48pZNCYuBDfBh7FxkB7qDgGDiaftEkZZfApRg2E+M9
G8wkNKTPLDc4wH0FDTijhgxR3Y4PiS1HL2Zhw7bD3CbslmEGgfnnZojNkJtcLeBH
BLa52/dSwNU4WWLubaYSiAmA9IUMX1/RpfpxOxd4Ykmhz97oFbUaDJFipIggx5sX
ePAlkTdWnv+RWBxlJwMQ25oEHmRguNYf4Zr/Rxr9cS93Y+mdXIZaBEE0KS2iLRqa
OiWBki9IMQU4phqPOBAaG7A+eP8PAgMBAAGjZjBkMB0GA1UdDgQWBBTh0YHlzlpf
BKrS6badZrHF+qwshzAfBgNVHSMEGDAWgBTh0YHlzlpfBKrS6badZrHF+qwshzAS
BgNVHRMBAf8ECDAGAQH/AgEEMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF
AAOCAgEAALIY1wkilt/urfEVM5vKzr6utOeDWCUczmWX/RX4ljpRdgF+5fAIS4vH
tmXkqpSCOVeWUrJV9QvZn6L227ZwuE15cWi8DCDal3Ue90WgAJJZMfTshN4OI8cq
W9E4EG9wglbEtMnObHlms8F3CHmrw3k6KmUkWGoa+/ENmcVl68u/cMRl1JbW2bM+
/3A+SAg2c6iPDlehczKx2oa95QW0SkPPWGuNA/CE8CpyANIhu9XFrj3RQ3EqeRcS
AQQod1RNuHpfETLU/A2gMmvn/w/sx7TB3W5BPs6rprOA37tutPq9u6FTZOcG1Oqj
C/B7yTqgI7rbyvox7DEXoX7rIiEqyNNUguTk/u3SZ4VXE2kmxdmSh3TQvybfbnXV
4JbCZVaqiZraqc7oZMnRoWrXRG3ztbnbes/9qhRGI7PqXqeKJBztxRTEVj8ONs1d
WN5szTwaPIvhkhO3CO5ErU2rVdUr89wKpNXbBODFKRtgxUT70YpmJ46VVaqdAhOZ
D9EUUn4YaeLaS8AjSF/h7UkjOibNc4qVDiPP+rkehFWM66PVnP1Msh93tc+taIfC
EYVMxjh8zNbFuoc7fzvvrFILLe7ifvEIUqSVIC/AzplM/Jxw7buXFeGP1qVCBEHq
391d/9RAfaZ12zkwFsl+IKwE/OZxW8AHa9i1p4GO0YSNuczzEm4=
-----END CERTIFICATE-----
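A quick sanity check that the bundled certificate parses and loads into the same kind of SSL context the provider builds; the relative path is assumed to match the layout introduced in this commit:

import os
import ssl

# Path assumed from the new provider's directory layout.
cert_path = os.path.join("g4f", "Provider", "gigachat_crt",
                         "russian_trusted_root_ca_pem.crt")

ctx = ssl.create_default_context(cafile=cert_path)  # raises SSLError on a malformed PEM
print(ctx.cert_store_stats())  # expect the single bundled root CA in the stats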

g4f/models.py

@@ -15,6 +15,7 @@ from .Provider import (
    ChatgptAi,
    DeepInfra,
    ChatBase,
    GigaChat,
    Liaobots,
    FreeGpt,
    Llama2,
@@ -95,6 +96,24 @@ gpt_4_turbo = Model(
    best_provider = Bing
)

gigachat = Model(
    name = 'GigaChat:latest',
    base_provider = 'gigachat',
    best_provider = GigaChat
)

gigachat_plus = Model(
    name = 'GigaChat-Plus',
    base_provider = 'gigachat',
    best_provider = GigaChat
)

gigachat_pro = Model(
    name = 'GigaChat-Pro',
    base_provider = 'gigachat',
    best_provider = GigaChat
)

llama2_7b = Model(
    name = "meta-llama/Llama-2-7b-chat-hf",
    base_provider = 'meta',
@@ -272,6 +291,11 @@ class ModelUtils:
        'codellama-34b-instruct': codellama_34b_instruct,
        'codellama-70b-instruct': codellama_70b_instruct,

        # GigaChat
        'gigachat' : gigachat,
        'gigachat_plus': gigachat_plus,
        'gigachat_pro' : gigachat_pro,

        'mixtral-8x7b': mixtral_8x7b,
        'mistral-7b': mistral_7b,
        'dolphin-mixtral-8x7b': dolphin_mixtral_8x7b,
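With the aliases registered in ModelUtils, the new models should be reachable through the usual public entry point; a hedged sketch, assuming the standard g4f.ChatCompletion.create API and a valid key (the placeholder credential is hypothetical):

import g4f
from g4f.Provider import GigaChat

# 'gigachat' resolves to the GigaChat:latest model registered above.
response = g4f.ChatCompletion.create(
    model="gigachat",
    provider=GigaChat,
    messages=[{"role": "user", "content": "Summarize this commit in one line."}],
    api_key="<GIGACHAT_AUTHORIZATION_KEY>",  # hypothetical placeholder
    stream=False,
)
print(response)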