mirror of https://github.com/xtekky/gpt4free
Merge branch 'main' of https://github.com/hpsj/gpt4free
commit
c7d26ed867
@ -0,0 +1,19 @@
|
||||
aiassist.site
|
||||
|
||||
### Example: `aiassist` <a name="example-assist"></a>
|
||||
|
||||
```python
|
||||
import aiassist
|
||||
|
||||
question1 = "Who won the world series in 2020?"
|
||||
req = aiassist.Completion.create(prompt=question1)
|
||||
answer = req["text"]
|
||||
message_id = req["parentMessageId"]
|
||||
|
||||
question2 = "Where was it played?"
|
||||
req2 = aiassist.Completion.create(prompt=question2, parentMessageId=message_id)
|
||||
answer2 = req2["text"]
|
||||
|
||||
print(answer)
|
||||
print(answer2)
|
||||
```
|
@ -0,0 +1,34 @@
|
||||
import json
|
||||
import requests
|
||||
|
||||
|
||||
class Completion:
    """Client for the aiassist.site chat-process endpoint."""

    @staticmethod
    def create(
        systemMessage: str = "You are a helpful assistant",
        prompt: str = "",
        parentMessageId: str = "",
        temperature: float = 0.8,
        top_p: float = 1,
    ) -> dict:
        """Send one chat request and return the parsed JSON reply.

        Args:
            systemMessage: System prompt framing the assistant's behavior.
            prompt: The user's message.
            parentMessageId: Id of the previous reply, for multi-turn chat.
            temperature: Sampling temperature forwarded to the backend.
            top_p: Nucleus-sampling parameter forwarded to the backend.

        Returns:
            The JSON object decoded from the last line of the response
            body (the usage examples read "text" and "parentMessageId"
            from it — confirm against the live API).

        Raises:
            requests.HTTPError: If the server answers with an error status.
        """
        json_data = {
            "prompt": prompt,
            "options": {"parentMessageId": parentMessageId},
            "systemMessage": systemMessage,
            "temperature": temperature,
            "top_p": top_p,
        }

        url = "http://43.153.7.56:8080/api/chat-process"
        request = requests.post(url, json=json_data)
        # Fail loudly on HTTP errors instead of trying to JSON-parse an
        # error page below.
        request.raise_for_status()

        return Completion.__load_json(request.content)

    @classmethod
    def __load_json(cls, content) -> dict:
        """Parse the final line of a (possibly multi-line) response body.

        The endpoint streams one JSON object per line; only the last line
        carries the complete answer.
        """
        decode_content = str(content.decode("utf-8"))
        # [-1], not [1]: a single-line body with no '\n' must still parse
        # (rsplit then returns a one-element list and [1] would IndexError).
        last_line = decode_content.rsplit("\n", 1)[-1]
        return json.loads(last_line)
|
@ -0,0 +1,29 @@
|
||||
# Italygpt2 (Rewrite)
|
||||
Written by [sife-shuo](https://github.com/sife-shuo/).
|
||||
|
||||
## Description
|
||||
Unlike the `italygpt` module in the gpt4free pypi package, `italygpt2` supports streamed responses and changes how requests are sent, enabling continuous, logically-connected conversations.
|
||||
|
||||
Response speed improves when the conversation is continued over multiple calls.
|
||||
|
||||
### Completion:
|
||||
```python
|
||||
account_data=italygpt2.Account.create()
|
||||
for chunk in italygpt2.Completion.create(account_data=account_data,prompt="Who are you?"):
|
||||
print(chunk, end="", flush=True)
|
||||
print()
|
||||
```
|
||||
|
||||
### Chat
|
||||
Like most ChatGPT projects, the standard messages format is supported.
|
||||
Use the same format for the messages as you would for the [official OpenAI API](https://platform.openai.com/docs/guides/chat/introduction).
|
||||
```python
|
||||
messages = [
|
||||
{"role": "system", "content": ""},#...
|
||||
{"role": "user", "content": ""}#....
|
||||
]
|
||||
account_data=italygpt2.Account.create()
|
||||
for chunk in italygpt2.Completion.create(account_data=account_data,prompt="Who are you?",message=messages):
|
||||
print(chunk, end="", flush=True)
|
||||
print()
|
||||
```
|
@ -0,0 +1,70 @@
|
||||
import re
|
||||
import requests
|
||||
import hashlib
|
||||
from fake_useragent import UserAgent
|
||||
class Account:
    """Session state (next_id token and raw message history) for italygpt.it.

    The class itself acts as a singleton account object: ``create()``
    returns the class, and all state lives in class attributes
    (``_tid``, ``_raw``).
    """

    # Browser-like headers sent with every request; presumably the site
    # rejects non-browser requests — verify against the live service.
    _header = {
        "Host": "italygpt.it",
        "Referer": "https://italygpt.it/",
        # A fixed UA string; UserAgent().random (fake_useragent) is the
        # commented-out alternative the author considered.
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
        "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
        "Upgrade-Insecure-Requests": "1",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "none",
        "Sec-Fetch-User": "?1",
        "Connection": "keep-alive",
        "Alt-Used": "italygpt.it",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "TE": "trailers",
    }

    @staticmethod
    def create():
        """Fetch the landing page and scrape the initial next_id token.

        Returns:
            The ``Account`` class itself, primed with ``_tid`` and an
            empty ``_raw`` message history.

        Raises:
            RuntimeError: If the next_id hidden input cannot be found.
        """
        r = requests.get("https://italygpt.it/", headers=Account._header)
        # Raw string so \w is a regex escape, not a (deprecated) string
        # escape. Checking the match object directly replaces the old
        # dead len(tid)==0 branch: .group(1) on a failed search would
        # have raised AttributeError before that check ever ran.
        match = re.search(
            r'<input type="hidden" name="next_id" id="next_id" value="(\w+)">',
            r.text,
        )
        if match is None:
            raise RuntimeError("NetWorkError:failed to get id.")
        Account._tid = match.group(1)
        Account._raw = "[]"
        return Account

    @staticmethod
    def next(next_id: str) -> str:
        """Store the token for the next request and return it."""
        Account._tid = next_id
        return Account._tid

    @staticmethod
    def get() -> str:
        """Return the current next_id token."""
        return Account._tid

    @staticmethod
    def settraw(raws: str):
        """Replace the stored raw message history and return it."""
        Account._raw = raws
        return Account._raw

    @staticmethod
    def gettraw():
        """Return the stored raw message history."""
        return Account._raw
|
||||
|
||||
class Completion:
    """Streaming completions against https://italygpt.it/question."""

    @staticmethod
    def create(
        account_data,
        prompt: str,
        message=False,
    ):
        """Stream the answer to ``prompt`` as decoded text chunks.

        Args:
            account_data: The ``Account`` object/class returned by
                ``Account.create()``; supplies headers, the next_id
                token and the raw message history.
            prompt: The question to ask.
            message: Optional OpenAI-style messages list; when truthy it
                replaces the stored raw message history for this call.

        Yields:
            str: Successive chunks of the model's reply.

        Raises:
            requests.HTTPError: If the server answers with an error status.
        """
        param = {
            # Spaces are hand-encoded as '+'; requests will then
            # percent-encode the '+' itself — presumably what the site
            # expects, verify against the live service.
            "prompt": prompt.replace(" ", "+"),
            "creative": "off",
            "internet": "false",
            "detailed": "off",
            "current_id": "0",
            "code": "",
            "gpt4": "false",
            "raw_messages": account_data.gettraw(),
            # Each request is signed with the sha256 of the current token.
            "hash": hashlib.sha256(account_data.get().encode()).hexdigest(),
        }
        if message:
            param["raw_messages"] = str(message)

        r = requests.get(
            "https://italygpt.it/question",
            headers=account_data._header,
            params=param,
            stream=True,
        )
        # Check the status ONCE, up front. The original called
        # raise_for_status() inside the chunk loop — after the first chunk
        # had already been yielded, and after the header reads below,
        # which would KeyError on an error response.
        r.raise_for_status()

        # Persist the rotated token and updated history for the next call.
        account_data.next(r.headers["Next_id"])
        account_data.settraw(r.headers["Raw_messages"])

        for chunk in r.iter_content(chunk_size=None):
            yield chunk.decode()
|
@ -0,0 +1,3 @@
|
||||
# Smoke test for the usesless provider: create an account with
# verbose logging enabled so progress is visible on the console.
import usesless


usesless.Account.create(logging=True)
|
@ -0,0 +1,13 @@
|
||||
import aiassist

# Ask an initial question and capture the conversation id it returns.
first_reply = aiassist.Completion.create(prompt="Who won the world series in 2020?")
first_answer = first_reply["text"]
parent_id = first_reply["parentMessageId"]

# Ask a follow-up, threaded onto the same conversation via the id.
second_reply = aiassist.Completion.create(
    prompt="Where was it played?", parentMessageId=parent_id
)
second_answer = second_reply["text"]

print(first_answer)
print(second_answer)
|
@ -0,0 +1,4 @@
|
||||
from gpt4free import italygpt2

# Stream the answer chunk by chunk so it prints as it arrives.
session = italygpt2.Account.create()
for piece in italygpt2.Completion.create(account_data=session, prompt="Who are you?"):
    print(piece, end="", flush=True)
|
Loading…
Reference in New Issue