Merge branch 'main' into bom

commit 6dc2502740
Author: Tekky
Date:   2023-10-05 19:02:06 +01:00 (committed via GitHub)
3 changed files with 54 additions and 2 deletions

README.md

@@ -375,6 +375,7 @@ if __name__ == "__main__":
| [chat.ylokh.xyz](https://chat.ylokh.xyz) | `g4f.Provider.Ylokh` | ✔️ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
| [you.com](https://you.com) | `g4f.Provider.You` | ✔️ | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
| [chat9.yqcloud.top](https://chat9.yqcloud.top/) | `g4f.Provider.Yqcloud` | ✔️ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
| [cromicle.top](https://cromicle.top) | `g4f.Provider.Cromicle` | ✔️ | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
| [aiservice.vercel.app](https://aiservice.vercel.app/) | `g4f.Provider.AiService` | ✔️ | ❌ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
| [chat.dfehub.com](https://chat.dfehub.com/) | `g4f.Provider.DfeHub` | ✔️ | ❌ | ✔️ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
| [free.easychat.work](https://free.easychat.work) | `g4f.Provider.EasyChat` | ✔️ | ❌ | ✔️ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
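The rows above come from the README's provider table (roughly: website, provider class, GPT-3.5, GPT-4, streaming, async support, status, auth). As a hedged illustration of how a listed provider is used, the sketch below picks the newly added `g4f.Provider.Cromicle` explicitly via the package's `ChatCompletion.create` interface; treat it as an assumption-laden example rather than part of this diff:

```python
# Hypothetical sketch, not part of this commit: explicitly selecting one
# of the providers listed in the table above.
import g4f

response = g4f.ChatCompletion.create(
    model='gpt-3.5-turbo',
    provider=g4f.Provider.Cromicle,  # provider added in this commit
    messages=[{'role': 'user', 'content': 'Hello'}],
    stream=True,  # the table marks Cromicle as supporting streaming
)

for chunk in response:
    print(chunk, end='')
```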
@@ -573,4 +574,4 @@ along with this program. If not, see <https://www.gnu.org/licenses/>.
<a href="https://github.com/xtekky/gpt4free/stargazers">
<img width="500" alt="Star History Chart" src="https://api.star-history.com/svg?repos=xtekky/gpt4free&type=Date">
</a>

g4f/Provider/Cromicle.py (new file, 50 lines)

@@ -0,0 +1,50 @@
from __future__ import annotations

from hashlib import sha256

from aiohttp import ClientSession

from ..typing import AsyncGenerator
from .base_provider import AsyncGeneratorProvider


class Cromicle(AsyncGeneratorProvider):
    url = 'https://cromicle.top'
    working = True
    supports_gpt_35_turbo = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: list[dict[str, str]],
        proxy: str = None,
        **kwargs
    ) -> AsyncGenerator:
        # The endpoint takes a single prompt, so send the latest message.
        message = messages[-1]["content"]

        async with ClientSession(headers=_create_header()) as session:
            async with session.post(
                cls.url + '/chat',
                proxy=proxy,
                json=_create_payload(message)
            ) as response:
                response.raise_for_status()
                # Yield the response body chunk by chunk as it streams in.
                async for stream in response.content.iter_any():
                    if stream:
                        yield stream.decode()


def _create_header() -> dict[str, str]:
    return {
        'accept': '*/*',
        'content-type': 'application/json',
    }


def _create_payload(message: str) -> dict[str, str]:
    # The service expects the message, a static token and a SHA-256 digest
    # of the token concatenated with the message.
    return {
        'message': message,
        'token': 'abc',
        'hash': sha256('abc'.encode() + message.encode()).hexdigest()
    }
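For orientation, here is a minimal, hypothetical sketch of driving the generator above directly (not part of the diff); it assumes the class is importable as `g4f.Provider.Cromicle` and that cromicle.top is reachable:

```python
# Hypothetical usage sketch, not part of this commit: drive Cromicle's
# async generator directly and print streamed chunks as they arrive.
import asyncio

from g4f.Provider import Cromicle


async def main() -> None:
    async for chunk in Cromicle.create_async_generator(
        model='gpt-3.5-turbo',
        messages=[{'role': 'user', 'content': 'Say hello'}],
    ):
        print(chunk, end='', flush=True)


asyncio.run(main())
```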

g4f/models.py

@@ -26,6 +26,7 @@ from .Provider import (
Bing,
You,
H2o,
Cromicle,
)
@dataclass(unsafe_hash=True)
@@ -61,7 +62,7 @@ gpt_35_turbo = Model(
name = 'gpt-3.5-turbo',
base_provider = 'openai',
best_provider = RetryProvider([
-        Aibn, Aichat, Aivvm, ChatForAi, ChatgptAi, ChatgptLogin, DeepAi, FreeGpt, GptGo, Myshell, Ylokh,
+        DeepAi, ChatgptLogin, ChatgptAi, Aivvm, GptGo, AItianhu, Aichat, AItianhuSpace, Myshell, Aibn, ChatForAi, FreeGpt, Ylokh, Cromicle
])
)
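Since `best_provider` for `gpt-3.5-turbo` is now a `RetryProvider` that includes `Cromicle`, a call that does not name a provider falls back across the listed providers until one succeeds. A minimal sketch, assuming the package-level `g4f.ChatCompletion.create` and `g4f.models` interfaces:

```python
# Hypothetical sketch, not part of this commit: no provider is passed, so
# g4f falls back to gpt_35_turbo's RetryProvider (which now includes Cromicle).
import g4f

response = g4f.ChatCompletion.create(
    model=g4f.models.gpt_35_turbo,
    messages=[{'role': 'user', 'content': 'What is 2 + 2?'}],
)
print(response)
```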