Merge pull request #1124 from hlohaus/fake

Improve helper
Commit 955fb4bbaa by Tekky, 2023-10-22 22:55:32 +01:00 (committed by GitHub)
7 changed files with 68 additions and 48 deletions

View File

@@ -175,7 +175,7 @@ docker compose down
 ```py
 import g4f
-g4f.logging = True # enable logging
+g4f.debug.logging = True # enable logging
 g4f.check_version = False # Disable automatic version checking
 print(g4f.version) # check version
 print(g4f.Provider.Ails.params) # supported args

View File

@@ -1,7 +1,7 @@
 from __future__ import annotations
 import random, json
-from ..debug import logging
+from .. import debug
 from ..typing import AsyncResult, Messages
 from ..requests import StreamSession
 from .base_provider import AsyncGeneratorProvider, format_prompt, get_cookies
@@ -36,7 +36,7 @@ class AItianhuSpace(AsyncGeneratorProvider):
         rand = ''.join(random.choice(chars) for _ in range(6))
         domain = f"{rand}.{domains[model]}"
-        if logging:
+        if debug.logging:
             print(f"AItianhuSpace | using domain: {domain}")
         if not cookies:
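The import change above is the crux of this PR: binding the flag through the module (`debug.logging`) instead of copying the name (`from ..debug import logging`) means assignments made after import are actually seen. A minimal, self-contained sketch of the difference (plain Python semantics, not g4f code):

```py
import types

# stand-in for the g4f debug module
debug = types.ModuleType("debug")
debug.logging = False

# what `from debug import logging` would do: copy the current value into a new name
logging = debug.logging

# user flips the flag at runtime
debug.logging = True

print(logging)        # False - the copied name never sees the change
print(debug.logging)  # True  - attribute lookup reads the live module state
```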

View File

@@ -14,7 +14,7 @@ from .helper import format_prompt, get_cookies
 class ChatgptFree(AsyncProvider):
     url = "https://chatgptfree.ai"
     supports_gpt_35_turbo = True
-    working = True
+    working = False
     _post_id = None
     _nonce = None
@@ -24,6 +24,7 @@ class ChatgptFree(AsyncProvider):
         model: str,
         messages: Messages,
         proxy: str = None,
+        timeout: int = 120,
         cookies: dict = None,
         **kwargs
     ) -> str:
@@ -45,14 +46,19 @@ class ChatgptFree(AsyncProvider):
             'sec-fetch-dest': 'empty',
             'sec-fetch-mode': 'cors',
             'sec-fetch-site': 'same-origin',
-            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36',
+            'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36',
         }
-        async with StreamSession(headers=headers,
-                impersonate="chrome107", proxies={"https": proxy}, timeout=10) as session:
+        async with StreamSession(
+            headers=headers,
+            cookies=cookies,
+            impersonate="chrome107",
+            proxies={"https": proxy},
+            timeout=timeout
+        ) as session:
             if not cls._nonce:
-                async with session.get(f"{cls.url}/", cookies=cookies) as response:
+                async with session.get(f"{cls.url}/") as response:
                     response.raise_for_status()
                     response = await response.text()
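For reference, a hedged usage sketch of the new `timeout` keyword. The call shape assumes the inherited `AsyncProvider.create_async` entry point and that `ChatgptFree` is exported from `g4f.Provider`; note the same hunk also flips `working` to `False`, so the provider may be disabled in practice.

```py
import asyncio
from g4f.Provider import ChatgptFree

async def main():
    return await ChatgptFree.create_async(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Hi"}],
        timeout=120,  # new keyword from this PR; forwarded to StreamSession instead of the hard-coded 10s
    )

print(asyncio.run(main()))
```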

View File

@@ -3,13 +3,14 @@ from __future__ import annotations
 import sys
 import asyncio
 import webbrowser
-import http.cookiejar
 from os import path
 from asyncio import AbstractEventLoop
+from platformdirs import user_config_dir
 from ..typing import Dict, Messages
 from browser_cookie3 import chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox, BrowserCookieError
+from .. import debug

 # Change event loop policy on windows
 if sys.platform == 'win32':
@@ -44,7 +45,6 @@ def get_event_loop() -> AbstractEventLoop:
     )

 def init_cookies():
     urls = [
         'https://chat-gpt.org',
         'https://www.aitianhu.com',
@@ -72,16 +72,26 @@ def init_cookies():
 # Load cookies for a domain from all supported browsers.
 # Cache the results in the "_cookies" variable.
 def get_cookies(domain_name=''):
-    cj = http.cookiejar.CookieJar()
-    for cookie_fn in [chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox]:
+    if domain_name in _cookies:
+        return _cookies[domain_name]
+    def g4f(domain_name):
+        user_data_dir = user_config_dir("g4f")
+        cookie_file = path.join(user_data_dir, "Default", "Cookies")
+        if not path.exists(cookie_file):
+            return []
+        return chrome(cookie_file, domain_name)
+    cookies = {}
+    for cookie_fn in [g4f, chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox]:
         try:
-            for cookie in cookie_fn(domain_name=domain_name):
-                cj.set_cookie(cookie)
-        except BrowserCookieError:
+            cookie_jar = cookie_fn(domain_name=domain_name)
+            if len(cookie_jar) and debug.logging:
+                print(f"Read cookies from {cookie_fn.__name__} for {domain_name}")
+            for cookie in cookie_jar:
+                if cookie.name not in cookies:
+                    cookies[cookie.name] = cookie.value
+        except BrowserCookieError as e:
             pass
-    _cookies[domain_name] = {cookie.name: cookie.value for cookie in cj}
+    _cookies[domain_name] = cookies
     return _cookies[domain_name]
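A short usage sketch of the reworked `get_cookies` (module path assumed to be `g4f.Provider.helper`, matching the relative imports shown above): the first call reads the dedicated g4f Chrome profile and then every installed browser; repeat calls for the same domain return the cached dict.

```py
from g4f.Provider.helper import get_cookies

first = get_cookies(".chatgptfree.ai")   # walks the g4f profile, then chrome, chromium, opera, ...
second = get_cookies(".chatgptfree.ai")  # served from the _cookies cache, same dict object
assert first is second
```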
@@ -100,10 +110,8 @@ def format_prompt(messages: Messages, add_special_tokens=False) -> str:

 def get_browser(user_data_dir: str = None):
     from undetected_chromedriver import Chrome
-    from platformdirs import user_config_dir

     if not user_data_dir:
         user_data_dir = user_config_dir("g4f")
-        user_data_dir = path.join(user_data_dir, "Default")
     return Chrome(user_data_dir=user_data_dir)

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
 import uuid, json, time

 from ..base_provider import AsyncGeneratorProvider
-from ..helper import get_browser, get_cookies, format_prompt
+from ..helper import get_browser, get_cookies, format_prompt, get_event_loop
 from ...typing import AsyncResult, Messages
 from ...requests import StreamSession
@@ -73,26 +73,33 @@ class OpenaiChat(AsyncGeneratorProvider):
                     last_message = new_message

     @classmethod
-    def browse_access_token(cls) -> str:
-        try:
-            from selenium.webdriver.common.by import By
-            from selenium.webdriver.support.ui import WebDriverWait
-            from selenium.webdriver.support import expected_conditions as EC
-
-            driver = get_browser()
-        except ImportError:
-            return
-
-        driver.get(f"{cls.url}/")
-        try:
-            WebDriverWait(driver, 1200).until(
-                EC.presence_of_element_located((By.ID, "prompt-textarea"))
-            )
-            javascript = "return (await (await fetch('/api/auth/session')).json())['accessToken']"
-            return driver.execute_script(javascript)
-        finally:
-            time.sleep(1)
-            driver.quit()
+    async def browse_access_token(cls) -> str:
+        def browse() -> str:
+            try:
+                from selenium.webdriver.common.by import By
+                from selenium.webdriver.support.ui import WebDriverWait
+                from selenium.webdriver.support import expected_conditions as EC
+
+                driver = get_browser()
+            except ImportError:
+                return
+
+            driver.get(f"{cls.url}/")
+            try:
+                WebDriverWait(driver, 1200).until(
+                    EC.presence_of_element_located((By.ID, "prompt-textarea"))
+                )
+                javascript = "return (await (await fetch('/api/auth/session')).json())['accessToken']"
+                return driver.execute_script(javascript)
+            finally:
+                driver.close()
+                time.sleep(0.1)
+                driver.quit()
+        loop = get_event_loop()
+        return await loop.run_in_executor(
+            None,
+            browse
+        )

     @classmethod
     async def fetch_access_token(cls, cookies: dict, proxies: dict = None) -> str:
@@ -110,7 +117,7 @@ class OpenaiChat(AsyncGeneratorProvider):
         if cookies:
             cls._access_token = await cls.fetch_access_token(cookies, proxies)
         if not cls._access_token:
-            cls._access_token = cls.browse_access_token()
+            cls._access_token = await cls.browse_access_token()
         if not cls._access_token:
             raise RuntimeError("Read access token failed")
         return cls._access_token
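The `browse_access_token` rewrite wraps the blocking Selenium flow in `run_in_executor` so the event loop is not stalled while the browser window is open. A self-contained sketch of that pattern, with a stand-in function instead of the real Selenium code (the helper's `get_event_loop` plays the role of `get_running_loop` in the actual provider):

```py
import asyncio
import time

def blocking_browse() -> str:
    # stand-in for the Selenium flow: blocks its thread until the token is read
    time.sleep(0.1)
    return "access-token"

async def browse_access_token() -> str:
    loop = asyncio.get_running_loop()
    # run the blocking call on the default thread pool; other tasks keep running meanwhile
    return await loop.run_in_executor(None, blocking_browse)

print(asyncio.run(browse_access_token()))
```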

View File

@@ -5,13 +5,13 @@ import random
 from typing import List, Type, Dict
 from ..typing import CreateResult, Messages
 from .base_provider import BaseProvider, AsyncProvider
+from .. import debug

 class RetryProvider(AsyncProvider):
     __name__: str = "RetryProvider"
     working: bool = True
     supports_stream: bool = True
-    logging: bool = False

     def __init__(
         self,
@@ -39,7 +39,7 @@ class RetryProvider(AsyncProvider):
         started: bool = False
         for provider in providers:
             try:
-                if self.logging:
+                if debug.logging:
                     print(f"Using {provider.__name__} provider")

                 for token in provider.create_completion(model, messages, stream, **kwargs):
@@ -51,7 +51,7 @@ class RetryProvider(AsyncProvider):
             except Exception as e:
                 self.exceptions[provider.__name__] = e
-                if self.logging:
+                if debug.logging:
                     print(f"{provider.__name__}: {e.__class__.__name__}: {e}")
                 if started:
                     raise e
@@ -77,7 +77,7 @@ class RetryProvider(AsyncProvider):
                 )
             except Exception as e:
                 self.exceptions[provider.__name__] = e
-                if self.logging:
+                if debug.logging:
                     print(f"{provider.__name__}: {e.__class__.__name__}: {e}")

         self.raise_exceptions()
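With the per-class `logging` attribute gone, `RetryProvider` reads the shared `debug.logging` flag like every other provider. A hedged usage sketch (the constructor argument list and the use of `Ails`, the provider named in the README snippet above, are assumptions, not confirmed by this diff):

```py
import g4f
from g4f.Provider import RetryProvider, Ails

g4f.debug.logging = True  # RetryProvider now reads this shared flag instead of its own attribute

# minimal sketch: retry over a single provider and stream the tokens
provider = RetryProvider([Ails])
for token in provider.create_completion(
    "gpt-3.5-turbo",
    [{"role": "user", "content": "Hello"}],
    stream=True,
):
    print(token, end="")
```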

View File

@@ -3,7 +3,7 @@ from requests import get
 from .models import Model, ModelUtils, _all_models
 from .Provider import BaseProvider, RetryProvider
 from .typing import Messages, CreateResult, Union, List
-from .debug import logging
+from . import debug

 version = '0.1.7.4'
 version_check = True
@@ -46,8 +46,7 @@ def get_model_and_provider(model : Union[Model, str],
     if not provider.supports_stream and stream:
         raise ValueError(f'{provider.__name__} does not support "stream" argument')

-    if logging:
-        RetryProvider.logging = True
+    if debug.logging:
         print(f'Using {provider.__name__} provider')

     return model, provider
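End to end, enabling logging now takes a single switch; `get_model_and_provider` no longer has to mirror the flag onto `RetryProvider`. A brief sketch of the expected behaviour from user code (the `ChatCompletion.create` call shape follows the library's documented top-level API):

```py
import g4f

g4f.debug.logging = True  # one flag, read by get_model_and_provider, RetryProvider and the providers

response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)  # with the flag set, "Using <Provider> provider" is printed once the provider is picked
```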