mirror of https://github.com/xtekky/gpt4free
Update provider and model list (#1568)
Move bing.create_images and cookies helper; disable some providers (pull/1570/head)
parent
1d6709dafc
commit
5c75972c50
@ -0,0 +1,94 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import time
|
||||||
|
import os
|
||||||
|
from typing import Generator
|
||||||
|
|
||||||
|
from ..cookies import get_cookies
|
||||||
|
from ..webdriver import WebDriver, get_driver_cookies, get_browser
|
||||||
|
from ..image import ImageResponse
|
||||||
|
from ..errors import MissingRequirementsError, MissingAuthError
|
||||||
|
from .bing.create_images import BING_URL, create_images, create_session
|
||||||
|
|
||||||
|
# Base URL for all Bing requests.
# NOTE(review): this shadows the BING_URL imported from .bing.create_images
# above — both appear to hold the same value; confirm before removing either.
BING_URL = "https://www.bing.com"
# Maximum time (seconds) to wait for a manual login: 20 minutes.
TIMEOUT_LOGIN = 1200

def wait_for_login(driver: WebDriver, timeout: int = TIMEOUT_LOGIN) -> None:
    """
    Block until the user has logged in to Bing in the automated browser.

    Opens the Bing start page and polls for the "_U" authentication cookie
    every half second until it appears.

    Args:
        driver (WebDriver): Webdriver for browser automation.
        timeout (int): Maximum waiting time in seconds.

    Raises:
        RuntimeError: If the login process exceeds the timeout.
    """
    driver.get(f"{BING_URL}/")
    deadline = time.time() + timeout
    while not driver.get_cookie("_U"):
        if time.time() > deadline:
            raise RuntimeError("Timeout error")
        time.sleep(0.5)
|
||||||
|
|
||||||
|
def get_cookies_from_browser(proxy: str = None) -> dict[str, str]:
    """
    Open a browser via webdriver, wait for a Bing login, then collect cookies.

    Args:
        proxy (str, optional): Proxy configuration.

    Returns:
        dict[str, str]: Retrieved cookies.
    """
    with get_browser(proxy=proxy) as browser:
        wait_for_login(browser)
        time.sleep(1)  # give the session a moment to persist all cookies
        return get_driver_cookies(browser)
|
||||||
|
|
||||||
|
class CreateImagesBing:
    """A class for creating images using Bing."""

    def __init__(self, cookies: dict[str, str] = None, proxy: str = None) -> None:
        """
        Args:
            cookies (dict[str, str], optional): Authentication cookies; "_U" is required.
            proxy (str, optional): Proxy configuration.
        """
        # Fix: the original used a mutable default argument ({}), which is a
        # single dict shared by every instance constructed without cookies.
        self.cookies = cookies if cookies is not None else {}
        self.proxy = proxy

    def create_completion(self, prompt: str) -> Generator[ImageResponse, None, None]:
        """
        Generator for creating image completion based on a prompt.

        Args:
            prompt (str): Prompt to generate images.

        Yields:
            Generator[str, None, None]: The final output as markdown formatted string with images.

        Raises:
            MissingAuthError: If no "_U" cookie can be obtained.
        """
        cookies = self.cookies or get_cookies(".bing.com", False)
        if "_U" not in cookies:
            login_url = os.environ.get("G4F_LOGIN_URL")
            if login_url:
                yield f"Please login: [Bing]({login_url})\n\n"
            try:
                # Fall back to an interactive browser login to obtain cookies.
                self.cookies = get_cookies_from_browser(self.proxy)
            except MissingRequirementsError as e:
                raise MissingAuthError(f'Missing "_U" cookie. {e}')
        yield asyncio.run(self.create_async(prompt))

    async def create_async(self, prompt: str) -> ImageResponse:
        """
        Asynchronously creates a markdown formatted string with images based on the prompt.

        Args:
            prompt (str): Prompt to generate images.

        Returns:
            ImageResponse: The generated images with a preview URL template.

        Raises:
            MissingAuthError: If the "_U" cookie is missing.
        """
        cookies = self.cookies or get_cookies(".bing.com", False)
        if "_U" not in cookies:
            raise MissingAuthError('Missing "_U" cookie')
        proxy = os.environ.get("G4F_PROXY")
        async with create_session(cookies, proxy) as session:
            images = await create_images(session, prompt, self.proxy)
            return ImageResponse(images, prompt, {"preview": "{image}?w=200&h=200"})
|
@ -0,0 +1,97 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Optional dependency: platformdirs locates the per-user "g4f" config dir
# (used by the _g4f cookie loader below).
try:
    from platformdirs import user_config_dir
    has_platformdirs = True
except ImportError:
    has_platformdirs = False
# Optional dependency: browser_cookie3 reads cookies from installed browsers.
try:
    from browser_cookie3 import (
        chrome, chromium, opera, opera_gx,
        brave, edge, vivaldi, firefox,
        _LinuxPasswordManager, BrowserCookieError
    )
    has_browser_cookie3 = True
except ImportError:
    has_browser_cookie3 = False
|
||||||
|
|
||||||
|
from .typing import Dict, Cookies
|
||||||
|
from .errors import MissingRequirementsError
|
||||||
|
from . import debug
|
||||||
|
|
||||||
|
# Global in-memory cache: domain name -> cookies dict, filled by get_cookies().
_cookies: Dict[str, Cookies] = {}

# NOTE(review): when DBUS_SESSION_BUS_ADDRESS is "/dev/null" (headless or
# containerized Linux), there is no keyring daemon, so the Linux password
# lookup is stubbed out — presumably so browser_cookie3 can still decrypt
# cookies with its fallback path; confirm against browser_cookie3 internals.
if has_browser_cookie3 and os.environ.get('DBUS_SESSION_BUS_ADDRESS') == "/dev/null":
    _LinuxPasswordManager.get_password = lambda a, b: b"secret"
|
||||||
|
|
||||||
|
def get_cookies(domain_name: str = '', raise_requirements_error: bool = True) -> Dict[str, str]:
    """
    Return cookies for a domain, reading the browsers once and caching the result.

    Args:
        domain_name (str): The domain for which to load cookies.
        raise_requirements_error (bool): Raise when browser_cookie3 is missing.

    Returns:
        Dict[str, str]: A dictionary of cookie names and values.
    """
    try:
        # Serve from the module-level cache when this domain was read before.
        return _cookies[domain_name]
    except KeyError:
        loaded = load_cookies_from_browsers(domain_name, raise_requirements_error)
        _cookies[domain_name] = loaded
        return loaded
|
||||||
|
|
||||||
|
def set_cookies(domain_name: str, cookies: Cookies = None) -> None:
    """Cache cookies for a domain, or drop the cached entry when none are given."""
    if not cookies:
        # Remove any cached entry; missing keys are ignored.
        _cookies.pop(domain_name, None)
    else:
        _cookies[domain_name] = cookies
|
||||||
|
|
||||||
|
def load_cookies_from_browsers(domain_name: str, raise_requirements_error: bool = True) -> Cookies:
    """
    Collect cookies for a domain from every supported browser.

    Browsers are read in priority order, starting with the local "g4f"
    profile; the first browser to provide a given cookie name wins and
    later browsers never overwrite it.

    Args:
        domain_name (str): The domain for which to load cookies.
        raise_requirements_error (bool): Raise when browser_cookie3 is missing.

    Returns:
        Dict[str, str]: A dictionary of cookie names and values.
    """
    if not has_browser_cookie3:
        if raise_requirements_error:
            raise MissingRequirementsError('Install "browser_cookie3" package')
        return {}
    collected = {}
    readers = (_g4f, chrome, chromium, opera, opera_gx, brave, edge, vivaldi, firefox)
    for reader in readers:
        try:
            jar = reader(domain_name=domain_name)
            if len(jar) and debug.logging:
                print(f"Read cookies from {reader.__name__} for {domain_name}")
            for cookie in jar:
                # First reader wins: keep an existing value for this name.
                collected.setdefault(cookie.name, cookie.value)
        except BrowserCookieError:
            # Expected when a browser is not installed or its store is locked.
            pass
        except Exception as e:
            if debug.logging:
                print(f"Error reading cookies from {reader.__name__} for {domain_name}: {e}")
    return collected
|
||||||
|
|
||||||
|
def _g4f(domain_name: str) -> list:
    """
    Load cookies from the local "g4f" browser profile, if one exists.

    Args:
        domain_name (str): The domain for which to load cookies.

    Returns:
        list: List of cookies; empty when platformdirs or the profile is missing.
    """
    if not has_platformdirs:
        return []
    cookie_file = os.path.join(user_config_dir("g4f"), "Default", "Cookies")
    if not os.path.exists(cookie_file):
        return []
    # Profile uses the Chromium cookie store layout, so reuse the chrome loader.
    return chrome(cookie_file, domain_name)
|
Loading…
Reference in New Issue