Mirror of https://github.com/xtekky/gpt4free.git (synced 2024-11-10 19:11:01 +00:00)
Add py_arkose_generator in OpenaiChat

commit c796ba2253 (parent a00e4d9b45)
@@ -1,8 +1,7 @@
 from __future__ import annotations

 import uuid, json, time, os
-import tempfile, shutil, asyncio
-import sys, subprocess
+from py_arkose_generator.arkose import get_values_for_request

 from ..base_provider import AsyncGeneratorProvider
 from ..helper import get_browser, get_cookies, format_prompt, get_event_loop
@@ -145,57 +144,22 @@ class OpenaiChat(AsyncGeneratorProvider):
         return f"g4f.provider.{cls.__name__} supports: ({param})"

 async def get_arkose_token(proxy: str = None) -> str:
-    dir = os.path.dirname(os.path.dirname(__file__))
-    include = f'{dir}/npm/node_modules/funcaptcha'
     config = {
         "pkey": "3D86FBBA-9D22-402A-B512-3420086BA6CC",
         "surl": "https://tcr9i.chat.openai.com",
-        "data": {},
         "headers": {
             "User-Agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'
         },
         "site": "https://chat.openai.com",
-        "proxy": proxy
     }
-    source = """
-fun = require({include})
-config = {config}
-fun.getToken(config).then(token => {
-    console.log(token.token)
-})
-"""
-    source = source.replace('{include}', json.dumps(include))
-    source = source.replace('{config}', json.dumps(config))
-    tmp = tempfile.NamedTemporaryFile(delete=False)
-    tmp.write(source.encode())
-    tmp.close()
-    try:
-        return await exec_js(tmp.name)
-    finally:
-        os.unlink(tmp.name)
-
-async def exec_js(file: str) -> str:
-    node = shutil.which("node")
-    if not node:
-        if debug.logging:
-            print('OpenaiChat: "node" not found')
-        return
-    if sys.platform == 'win32':
-        p = subprocess.Popen(
-            [node, file],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE
-        )
-        stdout, stderr = p.communicate()
-        if p.returncode == 0:
-            return stdout.decode()
-        raise RuntimeError(f"Exec Error: {stderr.decode()}")
-    p = await asyncio.create_subprocess_exec(
-        node, file,
-        stderr=asyncio.subprocess.PIPE,
-        stdout=asyncio.subprocess.PIPE
-    )
-    stdout, stderr = await p.communicate()
-    if p.returncode == 0:
-        return stdout.decode()
-    raise RuntimeError(f"Exec Error: {stderr.decode()}")
+    args_for_request = get_values_for_request(config)
+    async with StreamSession(
+        proxies={"https": proxy},
+        impersonate="chrome107",
+    ) as session:
+        async with session.post(**args_for_request) as response:
+            response.raise_for_status()
+            decoded_json = await response.json()
+            if "token" in decoded_json:
+                return decoded_json["token"]
+            raise RuntimeError(f"Response: {decoded_json}")
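For reference, a minimal standalone sketch of the new token flow. It assumes get_values_for_request returns a dict of keyword arguments (URL, headers, body) that can be unpacked directly into an HTTP POST, which is what session.post(**args_for_request) in the patch implies; aiohttp stands in here for g4f's StreamSession, and fetch_arkose_token is a hypothetical name used only for this demo.

import asyncio
import aiohttp  # stand-in for g4f's StreamSession in this sketch
from py_arkose_generator.arkose import get_values_for_request

async def fetch_arkose_token(proxy: str = None) -> str:
    # Same funcaptcha config the patched get_arkose_token builds.
    config = {
        "pkey": "3D86FBBA-9D22-402A-B512-3420086BA6CC",
        "surl": "https://tcr9i.chat.openai.com",
        "headers": {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36"
        },
        "site": "https://chat.openai.com",
    }
    # Assumption: the returned dict holds the url/headers/data kwargs for the POST.
    args_for_request = get_values_for_request(config)
    async with aiohttp.ClientSession() as session:
        async with session.post(proxy=proxy, **args_for_request) as response:
            response.raise_for_status()
            decoded_json = await response.json(content_type=None)
            if "token" in decoded_json:
                return decoded_json["token"]
            raise RuntimeError(f"Response: {decoded_json}")

# asyncio.run(fetch_arkose_token())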
@@ -20,4 +20,5 @@ numpy
 asgiref
 fastapi
 uvicorn
 flask
+git+https://github.com/hlohaus/py-arkose-token-generator.git
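Since the new requirement installs straight from Git, the repository name (py-arkose-token-generator) differs from the module name the patch imports (py_arkose_generator). A purely illustrative sanity check after installing requirements, not part of the commit:

# Hypothetical post-install check: confirms the Git-installed package exposes
# the module and function the patched OpenaiChat provider imports.
try:
    from py_arkose_generator.arkose import get_values_for_request
except ImportError as exc:
    raise SystemExit(f"py_arkose_generator not importable: {exc}")
print("py_arkose_generator OK:", callable(get_values_for_request))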