Remove deprecated `supports_gpt_*` capability flags from providers; drop the Nexra provider and move GigaChat into its own package

This commit is contained in:
kqlio67 2024-10-19 13:21:19 +03:00
parent 0a1cfe1987
commit d7573a0039
28 changed files with 16 additions and 106 deletions

View File

@ -11,8 +11,6 @@ class AI365VIP(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://chat.ai365vip.com"
api_endpoint = "/api/chat"
working = True
supports_gpt_35_turbo = True
supports_gpt_4 = True
default_model = 'gpt-3.5-turbo'
models = [
'gpt-3.5-turbo',

View File

@ -12,7 +12,6 @@ class Ai4Chat(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://www.ai4chat.co"
api_endpoint = "https://www.ai4chat.co/generate-response"
working = True
supports_gpt_4 = False
supports_stream = False
supports_system_message = True
supports_message_history = True

View File

@ -12,7 +12,6 @@ class AiChatOnline(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://aichatonlineorg.erweima.ai"
api_endpoint = "/aichatonline/api/chat/gpt"
working = True
supports_gpt_4 = True
default_model = 'gpt-4o-mini'
@classmethod

View File

@ -12,7 +12,6 @@ class AiChats(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://ai-chats.org"
api_endpoint = "https://ai-chats.org/chat/send2/"
working = True
supports_gpt_4 = True
supports_message_history = True
default_model = 'gpt-4'
models = ['gpt-4', 'dalle']

View File

@ -17,9 +17,7 @@ class Airforce(AsyncGeneratorProvider, ProviderModelMixin):
working = True
default_model = 'llama-3-70b-chat'
supports_gpt_35_turbo = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True

View File

@ -12,7 +12,6 @@ class Allyfy(AsyncGeneratorProvider):
url = "https://allyfy.chat"
api_endpoint = "https://chatbot.allyfy.chat/api/v1/message/stream/super/chat"
working = True
supports_gpt_35_turbo = True
@classmethod
async def create_async_generator(

View File

@ -37,7 +37,6 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://bing.com/chat"
working = True
supports_message_history = True
supports_gpt_4 = True
default_model = "Balanced"
default_vision_model = "gpt-4-vision"
models = [getattr(Tones, key) for key in Tones.__dict__ if not key.startswith("__")]

View File

@ -13,7 +13,6 @@ class ChatGptEs(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://chatgpt.es"
api_endpoint = "https://chatgpt.es/wp-admin/admin-ajax.php"
working = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True

View File

@ -12,7 +12,6 @@ class Chatgpt4Online(AsyncGeneratorProvider):
url = "https://chatgpt4online.org"
api_endpoint = "/wp-json/mwai-ui/v1/chats/submit"
working = True
supports_gpt_4 = True
async def get_nonce(headers: dict) -> str:
async with ClientSession(headers=headers) as session:

View File

@ -9,7 +9,6 @@ from .helper import format_prompt
class Chatgpt4o(AsyncProvider, ProviderModelMixin):
url = "https://chatgpt4o.one"
supports_gpt_4 = True
working = True
_post_id = None
_nonce = None

View File

@ -10,7 +10,6 @@ from .helper import format_prompt
class ChatgptFree(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://chatgptfree.ai"
supports_gpt_4 = True
working = True
_post_id = None
_nonce = None

View File

@ -13,7 +13,6 @@ class DDG(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://duckduckgo.com"
api_endpoint = "https://duckduckgo.com/duckchat/v1/chat"
working = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True

View File

@ -12,8 +12,6 @@ class DarkAI(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://www.aiuncensored.info"
api_endpoint = "https://darkai.foundation/chat"
working = True
supports_gpt_35_turbo = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True

View File

@ -11,7 +11,6 @@ class Editee(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://editee.com"
api_endpoint = "https://editee.com/submit/chatgptfree"
working = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True

View File

@ -13,7 +13,6 @@ from ..requests.raise_for_status import raise_for_status
class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://flowgpt.com/chat"
working = False
supports_gpt_35_turbo = True
supports_message_history = True
supports_system_message = True
default_model = "gpt-3.5-turbo"

View File

@ -13,8 +13,6 @@ class FreeNetfly(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://free.netfly.top"
api_endpoint = "/api/openai/v1/chat/completions"
working = True
supports_gpt_35_turbo = True
supports_gpt_4 = True
default_model = 'gpt-3.5-turbo'
models = [
'gpt-3.5-turbo',

View File

@ -14,7 +14,6 @@ class Koala(AsyncGeneratorProvider, ProviderModelMixin):
api_endpoint = "https://koala.sh/api/gpt/"
working = True
supports_message_history = True
supports_gpt_4 = True
default_model = 'gpt-4o-mini'
@classmethod

View File

@ -170,7 +170,6 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
working = True
supports_message_history = True
supports_system_message = True
supports_gpt_4 = True
default_model = "gpt-3.5-turbo"
models = list(models.keys())

View File

@ -14,7 +14,6 @@ class MagickPen(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://magickpen.com"
api_endpoint = "https://api.magickpen.com/ask"
working = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True

View File

@ -1,66 +0,0 @@
from __future__ import annotations
from aiohttp import ClientSession
import json
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..image import ImageResponse
class Nexra(AsyncGeneratorProvider, ProviderModelMixin):
    """Image-generation provider backed by the Nexra 'complements' API.

    Posts a JSON payload with the prompt and model name and yields an
    ImageResponse built from the first image URL in the reply.
    """
    label = "Nexra Animagine XL"
    url = "https://nexra.aryahcr.cc/documentation/midjourney/en"
    api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
    working = True

    default_model = 'animagine-xl'
    models = [default_model]

    @classmethod
    def get_model(cls, model: str) -> str:
        # Only one model is supported: every requested name maps to it.
        return cls.default_model

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        response: str = "url",  # "base64" or "url" — forwarded verbatim to the API
        **kwargs
    ) -> AsyncResult:
        """Yield a single ImageResponse for the prompt in ``messages``.

        The prompt is taken from ``messages[0]['content']``
        (NOTE(review): the FIRST message, not the last — confirm intended).
        On any failure an ImageResponse carrying an error text is yielded
        instead of raising, so callers always receive one item.
        """
        model = cls.get_model(model)
        prompt = messages[0]['content']

        headers = {
            "Content-Type": "application/json"
        }
        payload = {
            "prompt": prompt,
            "model": model,
            "response": response
        }

        async with ClientSession(headers=headers) as session:
            # FIX: the HTTP response was previously bound to the name
            # `response`, shadowing the string parameter above. Renamed to
            # `resp` so the parameter stays readable throughout the method.
            async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as resp:
                resp.raise_for_status()
                text_data = await resp.text()
                try:
                    # The endpoint may prepend non-JSON bytes; skip to the
                    # first '{' before parsing.
                    json_start = text_data.find('{')
                    data = json.loads(text_data[json_start:])
                    # Yield the first image URL if the API returned any.
                    if 'images' in data and len(data['images']) > 0:
                        yield ImageResponse(data['images'][0], prompt)
                    else:
                        yield ImageResponse("No images found in the response.", prompt)
                except json.JSONDecodeError:
                    yield ImageResponse("Failed to parse JSON. Response might not be in JSON format.", prompt)

View File

@ -12,7 +12,6 @@ class Pizzagpt(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://www.pizzagpt.it"
api_endpoint = "/api/chatx-completion"
working = True
supports_gpt_4 = True
default_model = 'gpt-4o-mini'
@classmethod

View File

@ -14,7 +14,7 @@ class Prodia(AsyncGeneratorProvider, ProviderModelMixin):
working = True
default_model = 'absolutereality_v181.safetensors [3d9d4d2b]'
models = [
image_models = [
'3Guofeng3_v34.safetensors [50f420de]',
'absolutereality_V16.safetensors [37db0fc3]',
default_model,
@ -81,6 +81,7 @@ class Prodia(AsyncGeneratorProvider, ProviderModelMixin):
'timeless-1.0.ckpt [7c4971d4]',
'toonyou_beta6.safetensors [980f6b15]',
]
models = [*image_models]
@classmethod
def get_model(cls, model: str) -> str:

View File

@ -19,7 +19,6 @@ class RubiksAI(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://rubiks.ai"
api_endpoint = "https://rubiks.ai/search/api.php"
working = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True

View File

@ -17,8 +17,6 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
label = "You.com"
url = "https://you.com"
working = True
supports_gpt_35_turbo = True
supports_gpt_4 = True
default_model = "gpt-4o-mini"
default_vision_model = "agent"
image_models = ["dall-e"]

View File

@ -5,11 +5,12 @@ from ..providers.retry_provider import RetryProvider, IterListProvider
from ..providers.base_provider import AsyncProvider, AsyncGeneratorProvider
from ..providers.create_images import CreateImagesProvider
from .deprecated import *
from .selenium import *
from .needs_auth import *
from .deprecated import *
from .selenium import *
from .needs_auth import *
from .nexra import *
from .gigachat import *
from .nexra import *
from .Ai4Chat import Ai4Chat
from .AI365VIP import AI365VIP
@ -46,7 +47,6 @@ from .FreeChatgpt import FreeChatgpt
from .FreeGpt import FreeGpt
from .FreeNetfly import FreeNetfly
from .GeminiPro import GeminiPro
from .GigaChat import GigaChat
from .GPROChat import GPROChat
from .HuggingChat import HuggingChat
from .HuggingFace import HuggingFace
@ -55,7 +55,7 @@ from .Liaobots import Liaobots
from .Local import Local
from .MagickPen import MagickPen
from .MetaAI import MetaAI
#from .MetaAIAccount import MetaAIAccount
#from .MetaAIAccount import MetaAIAccount
from .Ollama import Ollama
from .PerplexityLabs import PerplexityLabs
from .Pi import Pi

View File

@ -9,10 +9,10 @@ import json
from aiohttp import ClientSession, TCPConnector, BaseConnector
from g4f.requests import raise_for_status
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..errors import MissingAuthError
from .helper import get_connector
from ...typing import AsyncResult, Messages
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ...errors import MissingAuthError
from ..helper import get_connector
access_token = ""
token_expires_at = 0
@ -45,7 +45,7 @@ class GigaChat(AsyncGeneratorProvider, ProviderModelMixin):
if not api_key:
raise MissingAuthError('Missing "api_key"')
cafile = os.path.join(os.path.dirname(__file__), "gigachat_crt/russian_trusted_root_ca_pem.crt")
cafile = os.path.join(os.path.dirname(__file__), "russian_trusted_root_ca_pem.crt")
ssl_context = ssl.create_default_context(cafile=cafile) if os.path.exists(cafile) else None
if connector is None and ssl_context is not None:
connector = TCPConnector(ssl_context=ssl_context)

View File

@ -0,0 +1,2 @@
from .GigaChat import GigaChat