diff --git a/README.md b/README.md
index 4d2ce6fc..4a84c914 100644
--- a/README.md
+++ b/README.md
@@ -178,26 +178,100 @@ for message in response:
print(message)
```
-providers:
+##### Providers:
```py
from g4f.Provider import (
Acytoo,
Aichat,
Ails,
- AiService,
- AItianhu,
Bard,
Bing,
ChatgptAi,
ChatgptLogin,
DeepAi,
- GetGpt
+ EasyChat,
+ Equing,
+ GetGpt,
+ H2o,
+ HuggingChat,
+ Opchatgpts,
+ OpenAssistant,
+ OpenaiChat,
+ Raycast,
+ Theb,
+ Vercel,
+ Wewordle,
+ Wuguokai,
+ You,
+ Yqcloud
)
+# Usage:
+response = g4f.ChatCompletion.create(..., provider=ProviderName)
+```
+##### Needs cookies:
-# usage:
-response = g4f.ChatCompletion.create(..., provider=ProviderName)
+Many providers need cookies to work.
+In Bing you need a session, where you have passed the captcha.
+And in other providers you have to log in to your account.
+If you run the g4f package locally,
+cookies from your browsers are read with `get_cookies`.
+Otherwise you have to pass them in the parameter `cookies`:
+```py
+import g4f
+from g4f.Provider import (
+ Bard,
+ Bing,
+ H2o,
+ HuggingChat,
+ OpenAssistant,
+ OpenaiChat,
+ You,
+)
+# Usage:
+response = g4f.ChatCompletion.create(
+ model=g4f.models.default,
+ messages=[{"role": "user", "content": "Hello"}],
+ provider=Bard,
+ #cookies=g4f.get_cookies(".google.com"),
+ cookies={"cookie_name": "value", "cookie_name2": "value2"},
+ auth=True
+)
+```
+
+##### Async support:
+
+Run providers `async` to improve speed / performance.
+The full execution time corresponds to the execution time of the slowest provider.
+
+```py
+import g4f, asyncio
+
+async def run_async():
+ _providers = [
+ g4f.Provider.Bard,
+ g4f.Provider.Bing,
+ g4f.Provider.H2o,
+ g4f.Provider.HuggingChat,
+ g4f.Provider.Liaobots,
+ g4f.Provider.OpenAssistant,
+ g4f.Provider.OpenaiChat,
+ g4f.Provider.You,
+ g4f.Provider.Yqcloud,
+ ]
+ responses = [
+ provider.create_async(
+ model=None,
+ messages=[{"role": "user", "content": "Hello"}],
+ )
+ for provider in _providers
+ ]
+ responses = await asyncio.gather(*responses)
+ for idx, provider in enumerate(_providers):
+ print(f"{provider.__name__}:", responses[idx])
+
+asyncio.run(run_async())
```
### interference openai-proxy api (use with openai python package)
@@ -247,32 +321,40 @@ if __name__ == "__main__":
### gpt-3.5 / gpt-4
-| Website | Provider | gpt-3.5 | gpt-4 | Streaming | Status | Auth |
-| ----------------------------------------------------------------------------- | ------------------------- | ------- | ----- | --------- | ---------------------------------------------------------- | ---- |
-| [www.aitianhu.com](https://www.aitianhu.com/api/chat-process) | g4f.Provider.AItianhu | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [chat.acytoo.com](https://chat.acytoo.com/api/completions) | g4f.Provider.Acytoo | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [aiservice.vercel.app](https://aiservice.vercel.app/api/chat/answer) | g4f.Provider.AiService | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [chat-gpt.org](https://chat-gpt.org/chat) | g4f.Provider.Aichat | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [ai.ls](https://ai.ls) | g4f.Provider.Ails | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [bard.google.com](https://bard.google.com) | g4f.Provider.Bard | ❌ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ |
-| [bing.com](https://bing.com/chat) | g4f.Provider.Bing | ❌ | ✔️ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [chatgpt.ai](https://chatgpt.ai/gpt-4/) | g4f.Provider.ChatgptAi | ❌ | ✔️ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [chatgptlogin.ac](https://chatgptlogin.ac) | g4f.Provider.ChatgptLogin | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [deepai.org](https://deepai.org) | g4f.Provider.DeepAi | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [chat.dfehub.com](https://chat.dfehub.com/api/chat) | g4f.Provider.DfeHub | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [free.easychat.work](https://free.easychat.work) | g4f.Provider.EasyChat | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [forefront.com](https://forefront.com) | g4f.Provider.Forefront | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [chat.getgpt.world](https://chat.getgpt.world/) | g4f.Provider.GetGpt | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [gpt-gm.h2o.ai](https://gpt-gm.h2o.ai) | g4f.Provider.H2o | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [liaobots.com](https://liaobots.com) | g4f.Provider.Liaobots | ✔️ | ✔️ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ✔️ |
-| [supertest.lockchat.app](http://supertest.lockchat.app) | g4f.Provider.Lockchat | ✔️ | ✔️ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [opchatgpts.net](https://opchatgpts.net) | g4f.Provider.Opchatgpts | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [backend.raycast.com](https://backend.raycast.com/api/v1/ai/chat_completions) | g4f.Provider.Raycast | ✔️ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ |
-| [theb.ai](https://theb.ai) | g4f.Provider.Theb | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
-| [play.vercel.ai](https://play.vercel.ai) | g4f.Provider.Vercel | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [wewordle.org](https://wewordle.org/gptapi/v1/android/turbo) | g4f.Provider.Wewordle | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [you.com](https://you.com) | g4f.Provider.You | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [chat9.yqcloud.top](https://chat9.yqcloud.top/) | g4f.Provider.Yqcloud | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| Website| Provider| gpt-3.5 | gpt-4 | Streaming | Status | Auth |
+| ------ | ------- | ------- | ----- | --------- | ------ | ---- |
+| [chat.acytoo.com](https://chat.acytoo.com/) | g4f.provider.Acytoo | ✔️ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [chat-gpt.org](https://chat-gpt.org/chat) | g4f.provider.Aichat | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [ai.ls](https://ai.ls) | g4f.provider.Ails | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [bard.google.com](https://bard.google.com) | g4f.provider.Bard | ❌ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ |
+| [bing.com](https://bing.com/chat) | g4f.provider.Bing | ❌ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ |
+| [chatgpt.ai](https://chatgpt.ai/gpt-4/) | g4f.provider.ChatgptAi | ❌ | ✔️ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [opchatgpts.net](https://opchatgpts.net) | g4f.provider.ChatgptLogin | ✔️ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [deepai.org](https://deepai.org) | g4f.provider.DeepAi | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [free.easychat.work](https://free.easychat.work) | g4f.provider.EasyChat | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [next.eqing.tech](https://next.eqing.tech/) | g4f.provider.Equing | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [chat.getgpt.world](https://chat.getgpt.world/) | g4f.provider.GetGpt | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [gpt-gm.h2o.ai](https://gpt-gm.h2o.ai) | g4f.provider.H2o | ❌ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [huggingface.co](https://huggingface.co/chat/) | g4f.provider.HuggingChat | ❌ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ |
+| [liaobots.com](https://liaobots.com) | g4f.provider.Liaobots | ✔️ | ✔️ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [opchatgpts.net](https://opchatgpts.net) | g4f.provider.Opchatgpts | ✔️ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [open-assistant.io](https://open-assistant.io/chat) | g4f.provider.OpenAssistant | ❌ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ✔️ |
+| [chat.openai.com](https://chat.openai.com) | g4f.provider.OpenaiChat | ✔️ | ✔️ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ |
+| [raycast.com](https://raycast.com) | g4f.provider.Raycast | ✔️ | ✔️ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ |
+| [theb.ai](https://theb.ai) | g4f.provider.Theb | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ✔️ |
+| [play.vercel.ai](https://play.vercel.ai) | g4f.provider.Vercel | ✔️ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [wewordle.org](https://wewordle.org/) | g4f.provider.Wewordle | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [chat.wuguokai.xyz](https://chat.wuguokai.xyz) | g4f.provider.Wuguokai | ✔️ | ❌ | ❌ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
+| [you.com](https://you.com) | g4f.provider.You | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [chat9.yqcloud.top](https://chat9.yqcloud.top/) | g4f.provider.Yqcloud | ✔️ | ❌ | ❌ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
+| [www.aitianhu.com](https://www.aitianhu.com/) | g4f.provider.AItianhu | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
+| [aiservice.vercel.app](https://aiservice.vercel.app/) | g4f.provider.AiService | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
+| [chat.dfehub.com](https://chat.dfehub.com/) | g4f.provider.DfeHub | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
+| [chat9.fastgpt.me](https://chat9.fastgpt.me/) | g4f.provider.FastGpt | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
+| [forefront.com](https://forefront.com) | g4f.provider.Forefront | ✔️ | ❌ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
+| [supertest.lockchat.app](http://supertest.lockchat.app) | g4f.provider.Lockchat | ✔️ | ✔️ | ✔️ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
+| [p5.v50.ltd](https://p5.v50.ltd) | g4f.provider.V50 | ✔️ | ❌ | ❌ | ![Inactive](https://img.shields.io/badge/Inactive-red) | ❌ |
+
### Other Models
@@ -360,6 +442,20 @@ if __name__ == "__main__":
|
|
+
+ Action Translate Readme |
+ |
+ |
+ |
+ |
+
+
+ Langchain Document GPT |
+ |
+ |
+ |
+ |
+
diff --git a/g4f/Provider/AItianhu.py b/g4f/Provider/AItianhu.py
index e8e5714a..0982d3c6 100644
--- a/g4f/Provider/AItianhu.py
+++ b/g4f/Provider/AItianhu.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import json
import requests
@@ -7,7 +9,7 @@ from .base_provider import BaseProvider
class AItianhu(BaseProvider):
- url = "https://www.aitianhu.com/api/chat-process"
+ url = "https://www.aitianhu.com/"
working = False
supports_gpt_35_turbo = True
@@ -15,13 +17,10 @@ class AItianhu(BaseProvider):
def create_completion(
model: str,
messages: list[dict[str, str]],
- stream: bool,
- **kwargs: Any,
- ) -> CreateResult:
- base = ""
- for message in messages:
- base += "%s: %s\n" % (message["role"], message["content"])
- base += "assistant:"
+ stream: bool, **kwargs: Any) -> CreateResult:
+
+ base = "\n".join(f"{message['role']}: {message['content']}" for message in messages)
+ base += "\nassistant: "
headers = {
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
diff --git a/g4f/Provider/Acytoo.py b/g4f/Provider/Acytoo.py
index 2edd9efd..48a3a344 100644
--- a/g4f/Provider/Acytoo.py
+++ b/g4f/Provider/Acytoo.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import time
import requests
@@ -7,42 +9,42 @@ from .base_provider import BaseProvider
class Acytoo(BaseProvider):
- url = "https://chat.acytoo.com/api/completions"
- working = True
+ url = 'https://chat.acytoo.com/'
+ working = True
supports_gpt_35_turbo = True
- @staticmethod
+ @classmethod
def create_completion(
+ cls,
model: str,
messages: list[dict[str, str]],
- stream: bool,
- **kwargs: Any,
- ) -> CreateResult:
- headers = _create_header()
- payload = _create_payload(messages, kwargs.get('temperature', 0.5))
-
- url = "https://chat.acytoo.com/api/completions"
- response = requests.post(url=url, headers=headers, json=payload)
+ stream: bool, **kwargs: Any) -> CreateResult:
+
+ response = requests.post(f'{cls.url}api/completions',
+ headers=_create_header(), json=_create_payload(messages, kwargs.get('temperature', 0.5)))
+
response.raise_for_status()
- response.encoding = "utf-8"
+ response.encoding = 'utf-8'
+
yield response.text
def _create_header():
return {
- "accept": "*/*",
- "content-type": "application/json",
+ 'accept': '*/*',
+ 'content-type': 'application/json',
}
def _create_payload(messages: list[dict[str, str]], temperature):
payload_messages = [
- message | {"createdAt": int(time.time()) * 1000} for message in messages
+ message | {'createdAt': int(time.time()) * 1000} for message in messages
]
+
return {
- "key": "",
- "model": "gpt-3.5-turbo",
- "messages": payload_messages,
- "temperature": temperature,
- "password": "",
- }
+ 'key' : '',
+ 'model' : 'gpt-3.5-turbo',
+ 'messages' : payload_messages,
+ 'temperature' : temperature,
+ 'password' : ''
+ }
\ No newline at end of file
diff --git a/g4f/Provider/AiService.py b/g4f/Provider/AiService.py
index 2c0d5de2..2b5a6e7d 100644
--- a/g4f/Provider/AiService.py
+++ b/g4f/Provider/AiService.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import requests
from ..typing import Any, CreateResult
@@ -5,7 +7,7 @@ from .base_provider import BaseProvider
class AiService(BaseProvider):
- url = "https://aiservice.vercel.app/api/chat/answer"
+ url = "https://aiservice.vercel.app/"
working = False
supports_gpt_35_turbo = True
@@ -16,10 +18,8 @@ class AiService(BaseProvider):
stream: bool,
**kwargs: Any,
) -> CreateResult:
- base = ""
- for message in messages:
- base += "%s: %s\n" % (message["role"], message["content"])
- base += "assistant:"
+ base = "\n".join(f"{message['role']}: {message['content']}" for message in messages)
+ base += "\nassistant: "
headers = {
"accept": "*/*",
diff --git a/g4f/Provider/Aichat.py b/g4f/Provider/Aichat.py
index a1d90db7..59640533 100644
--- a/g4f/Provider/Aichat.py
+++ b/g4f/Provider/Aichat.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import requests
from ..typing import Any, CreateResult
@@ -5,22 +7,18 @@ from .base_provider import BaseProvider
class Aichat(BaseProvider):
- url = "https://chat-gpt.org/chat"
- working = True
+ url = "https://chat-gpt.org/chat"
+ working = True
supports_gpt_35_turbo = True
@staticmethod
def create_completion(
model: str,
messages: list[dict[str, str]],
- stream: bool,
- **kwargs: Any,
- ) -> CreateResult:
- base = ""
-
- for message in messages:
- base += "%s: %s\n" % (message["role"], message["content"])
- base += "assistant:"
+ stream: bool, **kwargs: Any) -> CreateResult:
+
+ chat = "\n".join(f"{message['role']}: {message['content']}" for message in messages)
+ chat += "\nassistant: "
headers = {
"authority": "chat-gpt.org",
diff --git a/g4f/Provider/Ails.py b/g4f/Provider/Ails.py
index 52b3745d..4eb21729 100644
--- a/g4f/Provider/Ails.py
+++ b/g4f/Provider/Ails.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import hashlib
import json
import time
@@ -11,18 +13,17 @@ from .base_provider import BaseProvider
class Ails(BaseProvider):
- url: str = "https://ai.ls"
- working = True
- supports_stream = True
+ url: str = "https://ai.ls"
+ working = True
+ supports_stream = True
supports_gpt_35_turbo = True
@staticmethod
def create_completion(
model: str,
messages: list[dict[str, str]],
- stream: bool,
- **kwargs: Any,
- ) -> CreateResult:
+ stream: bool, **kwargs: Any) -> CreateResult:
+
headers = {
"authority": "api.caipacity.com",
"accept": "*/*",
@@ -72,6 +73,8 @@ class Ails(BaseProvider):
if b"content" in token:
completion_chunk = json.loads(token.decode().replace("data: ", ""))
token = completion_chunk["choices"][0]["delta"].get("content")
+ if "ai.ls" in token.lower() or "ai.ci" in token.lower():
+ raise Exception("Response Error: " + token)
if token != None:
yield token
diff --git a/g4f/Provider/Bard.py b/g4f/Provider/Bard.py
index cbe728cd..2137d820 100644
--- a/g4f/Provider/Bard.py
+++ b/g4f/Provider/Bard.py
@@ -1,51 +1,33 @@
+from __future__ import annotations
+
import json
import random
import re
-import browser_cookie3
from aiohttp import ClientSession
-import asyncio
-from ..typing import Any, CreateResult
-from .base_provider import BaseProvider
+from .base_provider import AsyncProvider, format_prompt, get_cookies
+
-class Bard(BaseProvider):
+class Bard(AsyncProvider):
url = "https://bard.google.com"
needs_auth = True
working = True
- @classmethod
- def create_completion(
- cls,
- model: str,
- messages: list[dict[str, str]],
- stream: bool,
- proxy: str = None,
- cookies: dict = {},
- **kwargs: Any,
- ) -> CreateResult:
- yield asyncio.run(cls.create_async(str, messages, proxy, cookies))
-
@classmethod
async def create_async(
cls,
model: str,
messages: list[dict[str, str]],
proxy: str = None,
- cookies: dict = {},
- **kwargs: Any,
+ cookies: dict = None,
+ **kwargs
) -> str:
- if not cookies:
- for cookie in browser_cookie3.load(domain_name='.google.com'):
- cookies[cookie.name] = cookie.value
-
- formatted = "\n".join(
- ["%s: %s" % (message["role"], message["content"]) for message in messages]
- )
- prompt = f"{formatted}\nAssistant:"
-
+ prompt = format_prompt(messages)
if proxy and "://" not in proxy:
proxy = f"http://{proxy}"
+ if not cookies:
+ cookies = get_cookies(".google.com")
headers = {
'authority': 'bard.google.com',
@@ -62,10 +44,11 @@ class Bard(BaseProvider):
) as session:
async with session.get(cls.url, proxy=proxy) as response:
text = await response.text()
-
+
match = re.search(r'SNlM0e\":\"(.*?)\"', text)
- if match:
- snlm0e = match.group(1)
+ if not match:
+ raise RuntimeError("No snlm0e value.")
+ snlm0e = match.group(1)
params = {
'bl': 'boq_assistant-bard-web-server_20230326.21_p0',
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index 48b5477d..cec82108 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -1,294 +1,94 @@
+from __future__ import annotations
+
import asyncio
import json
import os
import random
-import ssl
-import uuid
import aiohttp
-import certifi
-import requests
-
-from ..typing import Any, AsyncGenerator, CreateResult, Tuple, Union
-from .base_provider import BaseProvider
+from aiohttp import ClientSession
+from ..typing import Any, AsyncGenerator, CreateResult, Union
+from .base_provider import AsyncGeneratorProvider, get_cookies
-class Bing(BaseProvider):
- url = "https://bing.com/chat"
- supports_gpt_4 = True
+class Bing(AsyncGeneratorProvider):
+ url = "https://bing.com/chat"
+ needs_auth = True
+ working = True
+ supports_gpt_4 = True
+ supports_stream = True
+
@staticmethod
- def create_completion(
- model: str,
- messages: list[dict[str, str]],
- stream: bool,
- **kwargs: Any,
- ) -> CreateResult:
+ def create_async_generator(
+ model: str,
+ messages: list[dict[str, str]],
+ cookies: dict = None, **kwargs) -> AsyncGenerator:
+
+ if not cookies:
+ cookies = get_cookies(".bing.com")
if len(messages) < 2:
prompt = messages[0]["content"]
- context = False
-
+ context = None
else:
prompt = messages[-1]["content"]
- context = convert(messages[:-1])
-
- response = run(stream_generate(prompt, jailbreak, context))
- for token in response:
- yield token
-
-
-def convert(messages: list[dict[str, str]]):
- context = ""
+ context = create_context(messages[:-1])
+
+ if cookies and "SRCHD" in cookies:
+ #TODO: Will implement proper cookie retrieval later and use a try-except mechanism in 'stream_generate' instead of defaulting the cookie value like this
+ cookies_dict = {
+ 'SRCHD' : cookies["SRCHD"],
+ 'PPLState' : '1',
+ 'KievRPSSecAuth': '',
+ 'SUID' : '',
+ 'SRCHUSR' : '',
+ 'SRCHHPGUSR' : '',
+ }
+
+ return stream_generate(prompt, context, cookies_dict)
- for message in messages:
- context += "[%s](#message)\n%s\n\n" % (message["role"], message["content"])
+def create_context(messages: list[dict[str, str]]):
+ context = "".join(f"[{message['role']}](#message)\n{message['content']}\n\n" for message in messages)
return context
-
-jailbreak = {
- "optionsSets": [
- "saharasugg",
- "enablenewsfc",
- "clgalileo",
- "gencontentv3",
- "nlu_direct_response_filter",
- "deepleo",
- "disable_emoji_spoken_text",
- "responsible_ai_policy_235",
- "enablemm",
- "h3precise"
- # "harmonyv3",
- "dtappid",
- "cricinfo",
- "cricinfov2",
- "dv3sugg",
- "nojbfedge",
- ]
-}
-
-
-ssl_context = ssl.create_default_context()
-ssl_context.load_verify_locations(certifi.where())
-
-
-def _format(msg: dict[str, Any]) -> str:
- return json.dumps(msg, ensure_ascii=False) + Defaults.delimiter
-
-
-async def stream_generate(
- prompt: str,
- mode: dict[str, list[str]] = jailbreak,
- context: Union[bool, str] = False,
-):
- timeout = aiohttp.ClientTimeout(total=900)
- session = aiohttp.ClientSession(timeout=timeout)
-
- conversationId, clientId, conversationSignature = await create_conversation()
-
- wss = await session.ws_connect(
- "wss://sydney.bing.com/sydney/ChatHub",
- ssl=ssl_context,
- autoping=False,
- headers={
- "accept": "application/json",
- "accept-language": "en-US,en;q=0.9",
- "content-type": "application/json",
- "sec-ch-ua": '"Not_A Brand";v="99", "Microsoft Edge";v="110", "Chromium";v="110"',
- "sec-ch-ua-arch": '"x86"',
- "sec-ch-ua-bitness": '"64"',
- "sec-ch-ua-full-version": '"109.0.1518.78"',
- "sec-ch-ua-full-version-list": '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
- "sec-ch-ua-mobile": "?0",
- "sec-ch-ua-model": "",
- "sec-ch-ua-platform": '"Windows"',
- "sec-ch-ua-platform-version": '"15.0.0"',
- "sec-fetch-dest": "empty",
- "sec-fetch-mode": "cors",
- "sec-fetch-site": "same-origin",
- "x-ms-client-request-id": str(uuid.uuid4()),
- "x-ms-useragent": "azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32",
- "Referer": "https://www.bing.com/search?q=Bing+AI&showconv=1&FORM=hpcodx",
- "Referrer-Policy": "origin-when-cross-origin",
- "x-forwarded-for": Defaults.ip_address,
- },
- )
-
- await wss.send_str(_format({"protocol": "json", "version": 1}))
- await wss.receive(timeout=900)
-
- argument: dict[str, Any] = {
- **mode,
+class Conversation():
+ def __init__(self, conversationId: str, clientId: str, conversationSignature: str) -> None:
+ self.conversationId = conversationId
+ self.clientId = clientId
+ self.conversationSignature = conversationSignature
+
+async def create_conversation(session: ClientSession) -> Conversation:
+ url = 'https://www.bing.com/turing/conversation/create'
+ async with await session.get(url) as response:
+ response = await response.json()
+ conversationId = response.get('conversationId')
+ clientId = response.get('clientId')
+ conversationSignature = response.get('conversationSignature')
+
+ if not conversationId or not clientId or not conversationSignature:
+ raise Exception('Failed to create conversation.')
+
+ return Conversation(conversationId, clientId, conversationSignature)
+
+async def list_conversations(session: ClientSession) -> list:
+ url = "https://www.bing.com/turing/conversation/chats"
+ async with session.get(url) as response:
+ response = await response.json()
+ return response["chats"]
+
+async def delete_conversation(session: ClientSession, conversation: Conversation) -> list:
+ url = "https://sydney.bing.com/sydney/DeleteSingleConversation"
+ json = {
+ "conversationId": conversation.conversationId,
+ "conversationSignature": conversation.conversationSignature,
+ "participant": {"id": conversation.clientId},
"source": "cib",
- "allowedMessageTypes": Defaults.allowedMessageTypes,
- "sliceIds": Defaults.sliceIds,
- "traceId": os.urandom(16).hex(),
- "isStartOfSession": True,
- "message": Defaults.location
- | {
- "author": "user",
- "inputMethod": "Keyboard",
- "text": prompt,
- "messageType": "Chat",
- },
- "conversationSignature": conversationSignature,
- "participant": {"id": clientId},
- "conversationId": conversationId,
+ "optionsSets": ["autosave"]
}
-
- if context:
- argument["previousMessages"] = [
- {
- "author": "user",
- "description": context,
- "contextType": "WebPage",
- "messageType": "Context",
- "messageId": "discover-web--page-ping-mriduna-----",
- }
- ]
-
- struct: dict[str, list[dict[str, Any]] | str | int] = {
- "arguments": [argument],
- "invocationId": "0",
- "target": "chat",
- "type": 4,
- }
-
- await wss.send_str(_format(struct))
-
- final = False
- draw = False
- resp_txt = ""
- result_text = ""
- resp_txt_no_link = ""
- cache_text = ""
-
- while not final:
- msg = await wss.receive(timeout=900)
- objects = msg.data.split(Defaults.delimiter) # type: ignore
-
- for obj in objects: # type: ignore
- if obj is None or not obj:
- continue
-
- response = json.loads(obj) # type: ignore
- if response.get("type") == 1 and response["arguments"][0].get(
- "messages",
- ):
- if not draw:
- if (
- response["arguments"][0]["messages"][0]["contentOrigin"]
- != "Apology"
- ) and not draw:
- resp_txt = result_text + response["arguments"][0]["messages"][
- 0
- ]["adaptiveCards"][0]["body"][0].get("text", "")
- resp_txt_no_link = result_text + response["arguments"][0][
- "messages"
- ][0].get("text", "")
-
- if response["arguments"][0]["messages"][0].get(
- "messageType",
- ):
- resp_txt = (
- resp_txt
- + response["arguments"][0]["messages"][0][
- "adaptiveCards"
- ][0]["body"][0]["inlines"][0].get("text")
- + "\n"
- )
- result_text = (
- result_text
- + response["arguments"][0]["messages"][0][
- "adaptiveCards"
- ][0]["body"][0]["inlines"][0].get("text")
- + "\n"
- )
-
- if cache_text.endswith(" "):
- final = True
- if wss and not wss.closed:
- await wss.close()
- if session and not session.closed:
- await session.close()
-
- yield (resp_txt.replace(cache_text, ""))
- cache_text = resp_txt
-
- elif response.get("type") == 2:
- if response["item"]["result"].get("error"):
- if wss and not wss.closed:
- await wss.close()
- if session and not session.closed:
- await session.close()
-
- raise Exception(
- f"{response['item']['result']['value']}: {response['item']['result']['message']}"
- )
-
- if draw:
- cache = response["item"]["messages"][1]["adaptiveCards"][0]["body"][
- 0
- ]["text"]
- response["item"]["messages"][1]["adaptiveCards"][0]["body"][0][
- "text"
- ] = (cache + resp_txt)
-
- if (
- response["item"]["messages"][-1]["contentOrigin"] == "Apology"
- and resp_txt
- ):
- response["item"]["messages"][-1]["text"] = resp_txt_no_link
- response["item"]["messages"][-1]["adaptiveCards"][0]["body"][0][
- "text"
- ] = resp_txt
-
- # print('Preserved the message from being deleted', file=sys.stderr)
-
- final = True
- if wss and not wss.closed:
- await wss.close()
- if session and not session.closed:
- await session.close()
-
-
-async def create_conversation() -> Tuple[str, str, str]:
- create = requests.get(
- "https://www.bing.com/turing/conversation/create",
- headers={
- "authority": "edgeservices.bing.com",
- "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
- "accept-language": "en-US,en;q=0.9",
- "cache-control": "max-age=0",
- "sec-ch-ua": '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
- "sec-ch-ua-arch": '"x86"',
- "sec-ch-ua-bitness": '"64"',
- "sec-ch-ua-full-version": '"110.0.1587.69"',
- "sec-ch-ua-full-version-list": '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
- "sec-ch-ua-mobile": "?0",
- "sec-ch-ua-model": '""',
- "sec-ch-ua-platform": '"Windows"',
- "sec-ch-ua-platform-version": '"15.0.0"',
- "sec-fetch-dest": "document",
- "sec-fetch-mode": "navigate",
- "sec-fetch-site": "none",
- "sec-fetch-user": "?1",
- "upgrade-insecure-requests": "1",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69",
- "x-edge-shopping-flag": "1",
- "x-forwarded-for": Defaults.ip_address,
- },
- )
-
- conversationId = create.json().get("conversationId")
- clientId = create.json().get("clientId")
- conversationSignature = create.json().get("conversationSignature")
-
- if not conversationId or not clientId or not conversationSignature:
- raise Exception("Failed to create conversation.")
-
- return conversationId, clientId, conversationSignature
-
+ async with session.post(url, json=json) as response:
+ response = await response.json()
+ return response["result"]["value"] == "Success"
class Defaults:
delimiter = "\x1e"
@@ -309,9 +109,6 @@ class Defaults:
]
sliceIds = [
- # "222dtappid",
- # "225cricinfo",
- # "224locals0"
"winmuid3tf",
"osbsdusgreccf",
"ttstmout",
@@ -349,6 +146,149 @@ class Defaults:
],
}
+ headers = {
+ 'accept': '*/*',
+ 'accept-language': 'en-US,en;q=0.9',
+ 'cache-control': 'max-age=0',
+ 'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
+ 'sec-ch-ua-arch': '"x86"',
+ 'sec-ch-ua-bitness': '"64"',
+ 'sec-ch-ua-full-version': '"110.0.1587.69"',
+ 'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
+ 'sec-ch-ua-mobile': '?0',
+ 'sec-ch-ua-model': '""',
+ 'sec-ch-ua-platform': '"Windows"',
+ 'sec-ch-ua-platform-version': '"15.0.0"',
+ 'sec-fetch-dest': 'document',
+ 'sec-fetch-mode': 'navigate',
+ 'sec-fetch-site': 'none',
+ 'sec-fetch-user': '?1',
+ 'upgrade-insecure-requests': '1',
+ 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
+ 'x-edge-shopping-flag': '1',
+ 'x-forwarded-for': ip_address,
+ }
+
+ optionsSets = {
+ "optionsSets": [
+ 'saharasugg',
+ 'enablenewsfc',
+ 'clgalileo',
+ 'gencontentv3',
+ "nlu_direct_response_filter",
+ "deepleo",
+ "disable_emoji_spoken_text",
+ "responsible_ai_policy_235",
+ "enablemm",
+ "h3precise"
+ "dtappid",
+ "cricinfo",
+ "cricinfov2",
+ "dv3sugg",
+ "nojbfedge"
+ ]
+ }
+
+def format_message(msg: dict) -> str:
+ return json.dumps(msg, ensure_ascii=False) + Defaults.delimiter
+
+def create_message(conversation: Conversation, prompt: str, context: str=None) -> str:
+ struct = {
+ 'arguments': [
+ {
+ **Defaults.optionsSets,
+ 'source': 'cib',
+ 'allowedMessageTypes': Defaults.allowedMessageTypes,
+ 'sliceIds': Defaults.sliceIds,
+ 'traceId': os.urandom(16).hex(),
+ 'isStartOfSession': True,
+ 'message': Defaults.location | {
+ 'author': 'user',
+ 'inputMethod': 'Keyboard',
+ 'text': prompt,
+ 'messageType': 'Chat'
+ },
+ 'conversationSignature': conversation.conversationSignature,
+ 'participant': {
+ 'id': conversation.clientId
+ },
+ 'conversationId': conversation.conversationId
+ }
+ ],
+ 'invocationId': '0',
+ 'target': 'chat',
+ 'type': 4
+ }
+
+ if context:
+ struct['arguments'][0]['previousMessages'] = [{
+ "author": "user",
+ "description": context,
+ "contextType": "WebPage",
+ "messageType": "Context",
+ "messageId": "discover-web--page-ping-mriduna-----"
+ }]
+ return format_message(struct)
+
+async def stream_generate(
+ prompt: str,
+ context: str=None,
+ cookies: dict=None
+ ):
+ async with ClientSession(
+ timeout=aiohttp.ClientTimeout(total=900),
+ cookies=cookies,
+ headers=Defaults.headers,
+ ) as session:
+ conversation = await create_conversation(session)
+ try:
+ async with session.ws_connect(
+ 'wss://sydney.bing.com/sydney/ChatHub',
+ autoping=False,
+ ) as wss:
+
+ await wss.send_str(format_message({'protocol': 'json', 'version': 1}))
+ msg = await wss.receive(timeout=900)
+
+ await wss.send_str(create_message(conversation, prompt, context))
+
+ response_txt = ''
+ result_text = ''
+ returned_text = ''
+ final = False
+
+ while not final:
+ msg = await wss.receive(timeout=900)
+ objects = msg.data.split(Defaults.delimiter)
+ for obj in objects:
+ if obj is None or not obj:
+ continue
+
+ response = json.loads(obj)
+ if response.get('type') == 1 and response['arguments'][0].get('messages'):
+ message = response['arguments'][0]['messages'][0]
+ if (message['contentOrigin'] != 'Apology'):
+ response_txt = result_text + \
+ message['adaptiveCards'][0]['body'][0].get('text', '')
+
+ if message.get('messageType'):
+ inline_txt = message['adaptiveCards'][0]['body'][0]['inlines'][0].get('text')
+ response_txt += inline_txt + '\n'
+ result_text += inline_txt + '\n'
+
+ if response_txt.startswith(returned_text):
+ new = response_txt[len(returned_text):]
+ if new != "\n":
+ yield new
+ returned_text = response_txt
+ elif response.get('type') == 2:
+ result = response['item']['result']
+ if result.get('error'):
+ raise Exception(f"{result['value']}: {result['message']}")
+ final = True
+ break
+ finally:
+ await delete_conversation(session, conversation)
def run(generator: AsyncGenerator[Union[Any, str], Any]):
loop = asyncio.get_event_loop()
@@ -360,3 +300,4 @@ def run(generator: AsyncGenerator[Union[Any, str], Any]):
except StopAsyncIteration:
break
+
diff --git a/g4f/Provider/ChatgptAi.py b/g4f/Provider/ChatgptAi.py
index 53518f65..7613ccf1 100644
--- a/g4f/Provider/ChatgptAi.py
+++ b/g4f/Provider/ChatgptAi.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import re
import requests
@@ -7,55 +9,51 @@ from .base_provider import BaseProvider
class ChatgptAi(BaseProvider):
- url = "https://chatgpt.ai/gpt-4/"
- working = True
- supports_gpt_4 = True
+ url: str = "https://chatgpt.ai/gpt-4/"
+ working = True
+ supports_gpt_4 = True
@staticmethod
def create_completion(
model: str,
messages: list[dict[str, str]],
- stream: bool,
- **kwargs: Any,
- ) -> CreateResult:
- chat = ""
- for message in messages:
- chat += "%s: %s\n" % (message["role"], message["content"])
- chat += "assistant: "
+ stream: bool, **kwargs: Any) -> CreateResult:
+
+ chat = "\n".join(f"{message['role']}: {message['content']}" for message in messages)
+ chat += "\nassistant: "
response = requests.get("https://chatgpt.ai/")
nonce, post_id, _, bot_id = re.findall(
r'data-nonce="(.*)"\n data-post-id="(.*)"\n data-url="(.*)"\n data-bot-id="(.*)"\n data-width',
- response.text,
- )[0]
+ response.text)[0]
headers = {
- "authority": "chatgpt.ai",
- "accept": "*/*",
- "accept-language": "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
- "cache-control": "no-cache",
- "origin": "https://chatgpt.ai",
- "pragma": "no-cache",
- "referer": "https://chatgpt.ai/gpt-4/",
- "sec-ch-ua": '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
- "sec-ch-ua-mobile": "?0",
- "sec-ch-ua-platform": '"Windows"',
- "sec-fetch-dest": "empty",
- "sec-fetch-mode": "cors",
- "sec-fetch-site": "same-origin",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
+ "authority" : "chatgpt.ai",
+ "accept" : "*/*",
+ "accept-language" : "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
+ "cache-control" : "no-cache",
+ "origin" : "https://chatgpt.ai",
+ "pragma" : "no-cache",
+ "referer" : "https://chatgpt.ai/gpt-4/",
+ "sec-ch-ua" : '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
+ "sec-ch-ua-mobile" : "?0",
+ "sec-ch-ua-platform" : '"Windows"',
+ "sec-fetch-dest" : "empty",
+ "sec-fetch-mode" : "cors",
+ "sec-fetch-site" : "same-origin",
+ "user-agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
}
data = {
- "_wpnonce": nonce,
- "post_id": post_id,
- "url": "https://chatgpt.ai/gpt-4",
- "action": "wpaicg_chat_shortcode_message",
- "message": chat,
- "bot_id": bot_id,
+ "_wpnonce" : nonce,
+ "post_id" : post_id,
+ "url" : "https://chatgpt.ai/gpt-4",
+ "action" : "wpaicg_chat_shortcode_message",
+ "message" : chat,
+ "bot_id" : bot_id,
}
response = requests.post(
- "https://chatgpt.ai/wp-admin/admin-ajax.php", headers=headers, data=data
- )
+ "https://chatgpt.ai/wp-admin/admin-ajax.php", headers=headers, data=data)
+
response.raise_for_status()
- yield response.json()["data"]
+ yield response.json()["data"]
\ No newline at end of file
diff --git a/g4f/Provider/ChatgptLogin.py b/g4f/Provider/ChatgptLogin.py
index da9fda40..e4584d32 100644
--- a/g4f/Provider/ChatgptLogin.py
+++ b/g4f/Provider/ChatgptLogin.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import base64
import os
import re
@@ -9,61 +11,58 @@ from .base_provider import BaseProvider
class ChatgptLogin(BaseProvider):
- url = "https://opchatgpts.net"
+ url = "https://opchatgpts.net"
supports_gpt_35_turbo = True
- working = True
+ working = True
@staticmethod
def create_completion(
model: str,
messages: list[dict[str, str]],
- stream: bool,
- **kwargs: Any,
- ) -> CreateResult:
+ stream: bool, **kwargs: Any) -> CreateResult:
+
headers = {
- "authority": "chatgptlogin.ac",
- "accept": "*/*",
- "accept-language": "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
- "content-type": "application/json",
- "origin": "https://opchatgpts.net",
- "referer": "https://opchatgpts.net/chatgpt-free-use/",
- "sec-ch-ua": '"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',
- "sec-ch-ua-mobile": "?0",
- "sec-ch-ua-platform": '"Windows"',
- "sec-fetch-dest": "empty",
- "sec-fetch-mode": "cors",
- "sec-fetch-site": "same-origin",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
- "x-wp-nonce": _get_nonce(),
+ "authority" : "chatgptlogin.ac",
+ "accept" : "*/*",
+ "accept-language" : "en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3",
+ "content-type" : "application/json",
+ "origin" : "https://opchatgpts.net",
+ "referer" : "https://opchatgpts.net/chatgpt-free-use/",
+ "sec-ch-ua" : '"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',
+ "sec-ch-ua-mobile" : "?0",
+ "sec-ch-ua-platform" : '"Windows"',
+ "sec-fetch-dest" : "empty",
+ "sec-fetch-mode" : "cors",
+ "sec-fetch-site" : "same-origin",
+ "user-agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
+ "x-wp-nonce" : _get_nonce(),
}
conversation = _transform(messages)
json_data = {
- "env": "chatbot",
- "session": "N/A",
- "prompt": "Converse as if you were an AI assistant. Be friendly, creative.",
- "context": "Converse as if you were an AI assistant. Be friendly, creative.",
- "messages": conversation,
- "newMessage": messages[-1]["content"],
- "userName": '<div class="mwai-name-text">User:</div>',
- "aiName": '<div class="mwai-name-text">AI:</div>',
- "model": "gpt-3.5-turbo",
- "temperature": kwargs.get("temperature", 0.8),
- "maxTokens": 1024,
- "maxResults": 1,
- "apiKey": "",
- "service": "openai",
+ "env" : "chatbot",
+ "session" : "N/A",
+ "prompt" : "Converse as if you were an AI assistant. Be friendly, creative.",
+ "context" : "Converse as if you were an AI assistant. Be friendly, creative.",
+ "messages" : conversation,
+ "newMessage" : messages[-1]["content"],
+ "userName" : '<div class="mwai-name-text">User:</div>',
+ "aiName" : '<div class="mwai-name-text">AI:</div>',
+ "model" : "gpt-3.5-turbo",
+ "temperature" : kwargs.get("temperature", 0.8),
+ "maxTokens" : 1024,
+ "maxResults" : 1,
+ "apiKey" : "",
+ "service" : "openai",
"embeddingsIndex": "",
- "stop": "",
- "clientId": os.urandom(6).hex(),
+ "stop" : "",
+ "clientId" : os.urandom(6).hex()
}
- response = requests.post(
- "https://opchatgpts.net/wp-json/ai-chatbot/v1/chat",
- headers=headers,
- json=json_data,
- )
+ response = requests.post("https://opchatgpts.net/wp-json/ai-chatbot/v1/chat",
+ headers=headers, json=json_data)
+
response.raise_for_status()
yield response.json()["reply"]
@@ -81,24 +80,21 @@ class ChatgptLogin(BaseProvider):
def _get_nonce() -> str:
- res = requests.get(
- "https://opchatgpts.net/chatgpt-free-use/",
- headers={
- "Referer": "https://opchatgpts.net/chatgpt-free-use/",
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
- },
- )
+ res = requests.get("https://opchatgpts.net/chatgpt-free-use/",
+ headers = {
+ "Referer" : "https://opchatgpts.net/chatgpt-free-use/",
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"})
result = re.search(
r'class="mwai-chat mwai-chatgpt">.*Send