From e94e9ffa4ee5aa70ab5a958cdce52ea619eb2de4 Mon Sep 17 00:00:00 2001
From: hp256 <971748116@qq.com>
Date: Fri, 19 May 2023 10:53:44 +0800
Subject: [PATCH 1/5] add oraai
---
gpt4free/oraai/README.md | 20 ++++++++
gpt4free/oraai/__init__.py | 102 +++++++++++++++++++++++++++++++++++++
2 files changed, 122 insertions(+)
create mode 100644 gpt4free/oraai/README.md
create mode 100644 gpt4free/oraai/__init__.py
diff --git a/gpt4free/oraai/README.md b/gpt4free/oraai/README.md
new file mode 100644
index 00000000..6f538711
--- /dev/null
+++ b/gpt4free/oraai/README.md
@@ -0,0 +1,20 @@
+# OraAI
+Written by [hp_mzx](https://github.com/hpsj).
+
+## Examples:
+### Completion:
+```python
+chunk = oraai.Completion.create("who are you")
+print(chunk)
+```
+
+### Chat Completion:
+Supports context.
+```python
+obj = oraai.ChatCompletion()
+while True:
+    prompt = input("Please enter a question:")
+    chunk = obj.create(prompt)
+    print(chunk)
+    print()
+```
\ No newline at end of file
diff --git a/gpt4free/oraai/__init__.py b/gpt4free/oraai/__init__.py
new file mode 100644
index 00000000..99a70098
--- /dev/null
+++ b/gpt4free/oraai/__init__.py
@@ -0,0 +1,102 @@
+import time
+import json
+import uuid
+import random
+import requests
+from fake_useragent import UserAgent
+
+
+class ChatCompletion:
+    def __init__(self, proxy=None, chatbotId="5111b690-edd3-403f-b02a-607332d059f9"):
+        self.userId = "auto:" + str(uuid.uuid4())
+        self.chatbotId = chatbotId
+        self.proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
+        self.conversationId = None
+        self.headers = {
+            "Content-Type": "application/json",
+            "Origin": "https://ora.ai",
+            "Referer": "https://ora.ai/",
+            'user-agent': UserAgent().random,
+        }
+
+    def create(self, prompt: str):
+        url = "https://ora.ai/api/conversation"
+        data = {
+            "chatbotId": self.chatbotId,
+            "config": False,
+            "includeHistory": True,
+            "input": prompt,
+            "provider": "OPEN_AI",
+            "userId": self.userId,
+        }
+
+        if self.conversationId:
+            data["conversationId"] = self.conversationId
+        response = requests.post(
+            url,
+            data=json.dumps(data),
+            proxies=self.proxies,
+            headers=self.headers
+        )
+        if response.status_code == 200:
+            response_json = response.json()
+            self.conversationId = response_json["conversationId"]
+            return response_json["response"]
+
+        raise ValueError(response.text)
+
+
+    def generate_image(self, prompt: str):
+        url = "https://ora.ai/api/images/request"
+        data = {
+            "prompt": prompt,
+            "seed": random.randint(0, 4294967295)
+        }
+        response = requests.post(
+            url,
+            data=json.dumps(data),
+            proxies=self.proxies,
+            headers=self.headers
+        )
+        if response.status_code == 200:
+            inferenceId = response.json()["id"]
+        else:
+            raise ValueError(response.text)
+
+        data = {
+            "chatbotId": self.chatbotId,
+            "inferenceId": inferenceId,
+            "userId": self.userId,
+            "userInput": "/generate " + prompt
+        }
+        # poll the status endpoint until the generated image is ready
+        if self.conversationId:
+            data["conversationId"] = self.conversationId
+        while True:
+            response = requests.post(
+                "https://ora.ai/api/images/check",
+                data=json.dumps(data),
+                proxies=self.proxies,
+                headers=self.headers
+            )
+            if response.status_code == 200:
+                response_json = response.json()
+                if response_json.get("conversationId"):
+                    self.conversationId = response_json["conversationId"]
+                    return response_json["inference"]["images"][0]["uri"]
+                else:
+                    time.sleep(0.5)
+            else:
+                raise ValueError(response.text)
+
+
+class Completion:
+    @classmethod
+    def create(cls, prompt, proxy=None):
+        return ChatCompletion(proxy).create(prompt)
+
+
+
+
+
+
From 3faf3630ced0ca3d7de0125be0901b713e0ef865 Mon Sep 17 00:00:00 2001
From: hp256 <971748116@qq.com>
Date: Mon, 22 May 2023 14:14:09 +0800
Subject: [PATCH 2/5] add hpgptapi
---
gpt4free/hpgptai/README.md | 39 +++++++++++++++++
gpt4free/hpgptai/__init__.py | 83 ++++++++++++++++++++++++++++++++++++
testing/hpgptai_test.py | 41 ++++++++++++++++++
3 files changed, 163 insertions(+)
create mode 100644 gpt4free/hpgptai/README.md
create mode 100644 gpt4free/hpgptai/__init__.py
create mode 100644 testing/hpgptai_test.py
diff --git a/gpt4free/hpgptai/README.md b/gpt4free/hpgptai/README.md
new file mode 100644
index 00000000..2735902f
--- /dev/null
+++ b/gpt4free/hpgptai/README.md
@@ -0,0 +1,39 @@
+# HpgptAI
+Written by [hp_mzx](https://github.com/hpsj).
+
+## Examples:
+### Completion:
+```python
+res = hpgptai.Completion.create("Who are you?", "127.0.0.1:7890")
+print(res["reply"])
+```
+
+### Chat Completion:
+Supports context.
+```python
+messages = [
+    {
+        "content": "Who are you?",
+        "html": "Who are you?",
+        "id": hpgptai.ChatCompletion.randomStr(),
+        "role": "user",
+        "who": "User: ",
+    },
+    {
+        "content": "I am an AI assistant here to provide you with all kinds of services and support. I can answer your questions, help you solve problems, provide relevant information, and carry out tasks. Just let me know what you need.",
+        "html": "I am an AI assistant here to provide you with all kinds of services and support. I can answer your questions, help you solve problems, provide relevant information, and carry out tasks. Just let me know what you need.",
+        "id": hpgptai.ChatCompletion.randomStr(),
+        "role": "assistant",
+        "who": "AI: ",
+    },
+    {
+        "content": "What did I ask in my previous message?",
+        "html": "What did I ask in my previous message?",
+        "id": hpgptai.ChatCompletion.randomStr(),
+        "role": "user",
+        "who": "User: ",
+    },
+]
+res = hpgptai.ChatCompletion.create(messages, proxy="127.0.0.1:7890")
+print(res["reply"])
+```
\ No newline at end of file
diff --git a/gpt4free/hpgptai/__init__.py b/gpt4free/hpgptai/__init__.py
new file mode 100644
index 00000000..66841a87
--- /dev/null
+++ b/gpt4free/hpgptai/__init__.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+"""
+@Time : 2023/5/22 14:04
+@Auth : Hp_mzx
+@File :__init__.py
+@IDE :PyCharm
+"""
+import json
+import requests
+import random
+import string
+
+class ChatCompletion:
+    @staticmethod
+    def create(
+        messages: list,
+        context: str = "Converse as if you were an AI assistant. Be friendly, creative.",
+        proxy: str = None
+    ):
+        url = "https://chatgptlogin.ac/wp-json/ai-chatbot/v1/chat"
+        headers = {
+            "Content-Type": "application/json",
+            "X-Wp-Nonce": "02244d73c2"
+        }
+        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
+        data = {
+            "env": "chatbot",
+            "session": "N/A",
+            "prompt": ChatCompletion.__build_prompt(context, messages),
+            "context": context,
+            "messages": messages,
+            "newMessage": messages[-1]["content"],
+ "userName": "
User:
",
+ "aiName": "AI:
",
+ "model": "gpt-3.5-turbo",
+ "temperature": 0.8,
+ "maxTokens": 1024,
+ "maxResults": 1,
+ "apiKey": "",
+ "service": "openai",
+ "embeddingsIndex": "",
+ "stop": "",
+ "clientId": ChatCompletion.randomStr(),
+ }
+ res = requests.post(url=url, data=json.dumps(data), headers=headers, proxies=proxies)
+ if res.status_code == 200:
+ return res.json()
+ return res.text
+
+
+    @staticmethod
+    def randomStr():
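+        # 11-character lowercase alphanumeric id used for client and message ids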
+        return ''.join(random.choices(string.ascii_lowercase + string.digits, k=11))
+
+    @classmethod
+    def __build_prompt(cls, context: str, message: list, isCasuallyFineTuned=False, last=15):
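+        # join the last `last` messages into a "Who: content" transcript and end with "AI: " for the model to continue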
+        prompt = context + '\n\n' if context else ''
+        message = message[-last:]
+        if isCasuallyFineTuned:
+            lastLine = message[-1]
+            prompt = lastLine["content"]
+            return prompt
+        conversation = [x["who"] + x["content"] for x in message]
+        prompt += '\n'.join(conversation)
+        prompt += '\n' + "AI: "
+        return prompt
+
+
+
+
+class Completion:
+    @staticmethod
+    def create(prompt: str, proxy: str = None):
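+        # wrap a single prompt as a one-message conversation and delegate to ChatCompletion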
+        messages = [
+            {
+                "content": prompt,
+                "html": prompt,
+                "id": ChatCompletion.randomStr(),
+                "role": "user",
+                "who": "User: ",
+            },
+        ]
+        return ChatCompletion.create(messages=messages, proxy=proxy)
\ No newline at end of file
diff --git a/testing/hpgptai_test.py b/testing/hpgptai_test.py
new file mode 100644
index 00000000..cdd146dd
--- /dev/null
+++ b/testing/hpgptai_test.py
@@ -0,0 +1,41 @@
+import hpgptai
+
+# single completion
+res = hpgptai.Completion.create("Who are you?", "127.0.0.1:7890")
+print(res["reply"])
+
+
+# chat completion
+messages = [
+    {
+        "content": "Who are you?",
+        "html": "Who are you?",
+        "id": hpgptai.ChatCompletion.randomStr(),
+        "role": "user",
+        "who": "User: ",
+    },
+    {
+        "content": "I am an AI assistant here to provide you with all kinds of services and support. I can answer your questions, help you solve problems, provide relevant information, and carry out tasks. Just let me know what you need.",
+        "html": "I am an AI assistant here to provide you with all kinds of services and support. I can answer your questions, help you solve problems, provide relevant information, and carry out tasks. Just let me know what you need.",
+        "id": hpgptai.ChatCompletion.randomStr(),
+        "role": "assistant",
+        "who": "AI: ",
+    },
+    {
+        "content": "What did I ask in my previous message?",
+        "html": "What did I ask in my previous message?",
+        "id": hpgptai.ChatCompletion.randomStr(),
+        "role": "user",
+        "who": "User: ",
+    },
+]
+res = hpgptai.ChatCompletion.create(messages, proxy="127.0.0.1:7890")
+print(res["reply"])
+
+
+
+
+
+
+
+
From 25dcf166a74151c5729be9c7901aedd7afd0b8f8 Mon Sep 17 00:00:00 2001
From: hp256 <971748116@qq.com>
Date: Tue, 23 May 2023 16:10:13 +0800
Subject: [PATCH 3/5] delete ora
---
gpt4free/oraai/README.md | 20 --------
gpt4free/oraai/__init__.py | 102 -------------------------------------
2 files changed, 122 deletions(-)
delete mode 100644 gpt4free/oraai/README.md
delete mode 100644 gpt4free/oraai/__init__.py
diff --git a/gpt4free/oraai/README.md b/gpt4free/oraai/README.md
deleted file mode 100644
index 6f538711..00000000
--- a/gpt4free/oraai/README.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# OraAI
-Written by [hp_mzx](https://github.com/hpsj).
-
-## Examples:
-### Completion:
-```python
-chunk = oraai.Completion.create("who are you")
-print(chunk)
-```
-
-### Chat Completion:
-Supports context.
-```python
-obj = oraai.ChatCompletion()
-while True:
-    prompt = input("Please enter a question:")
-    chunk = obj.create(prompt)
-    print(chunk)
-    print()
-```
\ No newline at end of file
diff --git a/gpt4free/oraai/__init__.py b/gpt4free/oraai/__init__.py
deleted file mode 100644
index 99a70098..00000000
--- a/gpt4free/oraai/__init__.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import time
-import json
-import uuid
-import random
-import requests
-from fake_useragent import UserAgent
-
-
-class ChatCompletion:
-    def __init__(self, proxy=None, chatbotId="5111b690-edd3-403f-b02a-607332d059f9"):
-        self.userId = "auto:" + str(uuid.uuid4())
-        self.chatbotId = chatbotId
-        self.proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
-        self.conversationId = None
-        self.headers = {
-            "Content-Type": "application/json",
-            "Origin": "https://ora.ai",
-            "Referer": "https://ora.ai/",
-            'user-agent': UserAgent().random,
-        }
-
-    def create(self, prompt: str):
-        url = "https://ora.ai/api/conversation"
-        data = {
-            "chatbotId": self.chatbotId,
-            "config": False,
-            "includeHistory": True,
-            "input": prompt,
-            "provider": "OPEN_AI",
-            "userId": self.userId,
-        }
-
-        if self.conversationId:
-            data["conversationId"] = self.conversationId
-        response = requests.post(
-            url,
-            data=json.dumps(data),
-            proxies=self.proxies,
-            headers=self.headers
-        )
-        if response.status_code == 200:
-            response_json = response.json()
-            self.conversationId = response_json["conversationId"]
-            return response_json["response"]
-
-        raise ValueError(response.text)
-
-
-    def generate_image(self, prompt: str):
-        url = "https://ora.ai/api/images/request"
-        data = {
-            "prompt": prompt,
-            "seed": random.randint(0, 4294967295)
-        }
-        response = requests.post(
-            url,
-            data=json.dumps(data),
-            proxies=self.proxies,
-            headers=self.headers
-        )
-        if response.status_code == 200:
-            inferenceId = response.json()["id"]
-        else:
-            raise ValueError(response.text)
-
-        data = {
-            "chatbotId": self.chatbotId,
-            "inferenceId": inferenceId,
-            "userId": self.userId,
-            "userInput": "/generate " + prompt
-        }
-        # poll the status endpoint until the generated image is ready
-        if self.conversationId:
-            data["conversationId"] = self.conversationId
-        while True:
-            response = requests.post(
-                "https://ora.ai/api/images/check",
-                data=json.dumps(data),
-                proxies=self.proxies,
-                headers=self.headers
-            )
-            if response.status_code == 200:
-                response_json = response.json()
-                if response_json.get("conversationId"):
-                    self.conversationId = response_json["conversationId"]
-                    return response_json["inference"]["images"][0]["uri"]
-                else:
-                    time.sleep(0.5)
-            else:
-                raise ValueError(response.text)
-
-
-class Completion:
-    @classmethod
-    def create(cls, prompt, proxy=None):
-        return ChatCompletion(proxy).create(prompt)
-
-
-
-
-
-
From a74d4ae363f0190c9696d1270862585ff54c445f Mon Sep 17 00:00:00 2001
From: hp256 <971748116@qq.com>
Date: Tue, 23 May 2023 16:52:00 +0800
Subject: [PATCH 4/5] add hpgptai
---
gpt4free/hpgptai/__init__.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/gpt4free/hpgptai/__init__.py b/gpt4free/hpgptai/__init__.py
index 66841a87..c8772a19 100644
--- a/gpt4free/hpgptai/__init__.py
+++ b/gpt4free/hpgptai/__init__.py
@@ -15,12 +15,13 @@ class ChatCompletion:
     def create(
         messages: list,
         context: str = "Converse as if you were an AI assistant. Be friendly, creative.",
+        restNonce: str = "9d6d743bd3",
         proxy: str = None
     ):
         url = "https://chatgptlogin.ac/wp-json/ai-chatbot/v1/chat"
         headers = {
             "Content-Type": "application/json",
-            "X-Wp-Nonce": "02244d73c2"
+            "X-Wp-Nonce": restNonce
         }
         proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
         data = {
From f545f4b479d3d8732828f758efaac20152b4c104 Mon Sep 17 00:00:00 2001
From: hp256 <971748116@qq.com>
Date: Tue, 23 May 2023 17:03:26 +0800
Subject: [PATCH 5/5] add gptworldai
---
gpt4free/gptworldAi/README.md | 25 ++++++++
gpt4free/gptworldAi/__init__.py | 103 ++++++++++++++++++++++++++++++++
testing/gptworldai_test.py | 18 ++++++
3 files changed, 146 insertions(+)
create mode 100644 gpt4free/gptworldAi/README.md
create mode 100644 gpt4free/gptworldAi/__init__.py
create mode 100644 testing/gptworldai_test.py
diff --git a/gpt4free/gptworldAi/README.md b/gpt4free/gptworldAi/README.md
new file mode 100644
index 00000000..a6b07f86
--- /dev/null
+++ b/gpt4free/gptworldAi/README.md
@@ -0,0 +1,25 @@
+# gptworldAi
+Written by [hp_mzx](https://github.com/hpsj).
+
+## Examples:
+### Completion:
+```python
+for chunk in gptworldAi.Completion.create("Who are you?", "127.0.0.1:7890"):
+    print(chunk, end="", flush=True)
+print()
+```
+
+### Chat Completion:
+Supports context.
+```python
+message = []
+while True:
+    prompt = input("Please enter a question: ")
+    message.append({"role": "user", "content": prompt})
+    text = ""
+    for chunk in gptworldAi.ChatCompletion.create(message, '127.0.0.1:7890'):
+        text = text + chunk
+        print(chunk, end="", flush=True)
+    print()
+    message.append({"role": "assistant", "content": text})
+```
\ No newline at end of file
diff --git a/gpt4free/gptworldAi/__init__.py b/gpt4free/gptworldAi/__init__.py
new file mode 100644
index 00000000..a729fdf8
--- /dev/null
+++ b/gpt4free/gptworldAi/__init__.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+"""
+@Time : 2023/5/23 13:37
+@Auth : Hp_mzx
+@File :__init__.py
+@IDE :PyCharm
+"""
+import json
+import random
+import binascii
+import requests
+import Crypto.Cipher.AES as AES
+from fake_useragent import UserAgent
+
+class ChatCompletion:
+    @staticmethod
+    def create(messages: list, proxy: str = None):
+        url = "https://chat.getgpt.world/api/chat/stream"
+        headers = {
+            "Content-Type": "application/json",
+            "Referer": "https://chat.getgpt.world/",
+            'user-agent': UserAgent().random,
+        }
+        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
+        data = json.dumps({
+            "messages": messages,
+            "frequency_penalty": 0,
+            "max_tokens": 4000,
+            "model": "gpt-3.5-turbo",
+            "presence_penalty": 0,
+            "temperature": 1,
+            "top_p": 1,
+            "stream": True
+        })
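+        # the JSON payload is encrypted client side and sent to the API as a single "signature" field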
+        signature = ChatCompletion.encrypt(data)
+        res = requests.post(url, headers=headers, data=json.dumps({"signature": signature}), proxies=proxies, stream=True)
+        res.raise_for_status()
+        for chunk in res.iter_content(chunk_size=None):
+            datas = chunk.decode('utf-8').split('data: ')
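+            # each chunk may contain several "data: {json}" SSE events; skip empties and the [DONE] sentinel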
+            for data in datas:
+                if not data or "[DONE]" in data:
+                    continue
+                data_json = json.loads(data)
+                content = data_json['choices'][0]['delta'].get('content')
+                if content:
+                    yield content
+
+
+    @staticmethod
+    def random_token(e):
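+        # e random characters drawn from [A-Za-z0-9]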
+ token = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+ n = len(token)
+ return "".join([token[random.randint(0, n - 1)] for i in range(e)])
+
+    @staticmethod
+    def encrypt(e):
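+        # generate a random 16-byte key and IV, AES-CBC encrypt the JSON payload,
+        # and append the key and IV in plaintext so the server can decrypt it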
+        t = ChatCompletion.random_token(16).encode('utf-8')
+        n = ChatCompletion.random_token(16).encode('utf-8')
+        r = e.encode('utf-8')
+        cipher = AES.new(t, AES.MODE_CBC, n)
+        ciphertext = cipher.encrypt(ChatCompletion.__pad_data(r))
+        return binascii.hexlify(ciphertext).decode('utf-8') + t.decode('utf-8') + n.decode('utf-8')
+
+    @staticmethod
+    def __pad_data(data: bytes) -> bytes:
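+        # PKCS#7-style padding to a whole multiple of the AES block size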
+        block_size = AES.block_size
+        padding_size = block_size - len(data) % block_size
+        padding = bytes([padding_size] * padding_size)
+        return data + padding
+
+
+class Completion:
+    @staticmethod
+    def create(prompt: str, proxy: str = None):
+        return ChatCompletion.create([
+            {
+                "content": "You are ChatGPT, a large language model trained by OpenAI.\nCarefully heed the user's instructions. \nRespond using Markdown.",
+                "role": "system"
+            },
+            {"role": "user", "content": prompt}
+        ], proxy)
+
+
+if __name__ == '__main__':
+    # single completion
+    text = ""
+    for chunk in Completion.create("Who are you?", "127.0.0.1:7890"):
+        text = text + chunk
+        print(chunk, end="", flush=True)
+    print()
+
+
+    # chat completion
+    message = []
+    while True:
+        prompt = input("Please enter a question: ")
+        message.append({"role": "user", "content": prompt})
+        text = ""
+        for chunk in ChatCompletion.create(message, '127.0.0.1:7890'):
+            text = text + chunk
+            print(chunk, end="", flush=True)
+        print()
+        message.append({"role": "assistant", "content": text})
\ No newline at end of file
diff --git a/testing/gptworldai_test.py b/testing/gptworldai_test.py
new file mode 100644
index 00000000..3dfb32ce
--- /dev/null
+++ b/testing/gptworldai_test.py
@@ -0,0 +1,18 @@
+import gptworldAi
+
+# single completion
+for chunk in gptworldAi.Completion.create("Who are you?", "127.0.0.1:7890"):
+    print(chunk, end="", flush=True)
+print()
+
+# chat completion
+message = []
+while True:
+    prompt = input("Please enter a question: ")
+    message.append({"role": "user", "content": prompt})
+    text = ""
+    for chunk in gptworldAi.ChatCompletion.create(message, '127.0.0.1:7890'):
+        text = text + chunk
+        print(chunk, end="", flush=True)
+    print()
+    message.append({"role": "assistant", "content": text})