feat(providers): unify Rocks and FluxAirforce into Airforce provider

commit 3e491c63d7 (parent 29515b6946)
Author: kqlio67
Date:   2024-09-13 17:09:31 +03:00

5 changed files with 399 additions and 197 deletions

g4f/Provider/Airforce.py (new file)

@@ -0,0 +1,255 @@
from __future__ import annotations
from aiohttp import ClientSession, ClientResponseError
import json

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..image import ImageResponse
from .helper import format_prompt

class Airforce(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://api.airforce"
text_api_endpoint = "https://api.airforce/chat/completions"
image_api_endpoint = "https://api.airforce/v1/imagine2"
working = True
supports_gpt_35_turbo = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True
default_model = 'llama-3-70b-chat'
text_models = [
# Open source models
'llama-2-13b-chat',
'llama-3-70b-chat',
'llama-3-70b-chat-turbo',
'llama-3-70b-chat-lite',
'llama-3-8b-chat',
'llama-3-8b-chat-turbo',
'llama-3-8b-chat-lite',
'llama-3.1-405b-turbo',
'llama-3.1-70b-turbo',
'llama-3.1-8b-turbo',
'LlamaGuard-2-8b',
'Llama-Guard-7b',
'Meta-Llama-Guard-3-8B',
'Mixtral-8x7B-Instruct-v0.1',
'Mixtral-8x22B-Instruct-v0.1',
'Mistral-7B-Instruct-v0.1',
'Mistral-7B-Instruct-v0.2',
'Mistral-7B-Instruct-v0.3',
'Qwen1.5-72B-Chat',
'Qwen1.5-110B-Chat',
'Qwen2-72B-Instruct',
'gemma-2b-it',
'gemma-2-9b-it',
'gemma-2-27b-it',
'dbrx-instruct',
'deepseek-llm-67b-chat',
'Nous-Hermes-2-Mixtral-8x7B-DPO',
'Nous-Hermes-2-Yi-34B',
'WizardLM-2-8x22B',
'SOLAR-10.7B-Instruct-v1.0',
'StripedHyena-Nous-7B',
'sparkdesk',
# Other models
'chatgpt-4o-latest',
'gpt-4',
'gpt-4-turbo',
'gpt-4o-mini-2024-07-18',
'gpt-4o-mini',
'gpt-4o',
'gpt-3.5-turbo',
'gpt-3.5-turbo-0125',
'gpt-3.5-turbo-1106',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-0613',
'gpt-3.5-turbo-16k-0613',
'gemini-1.5-flash',
'gemini-1.5-pro',
]
image_models = [
'flux',
'flux-realism',
'flux-anime',
'flux-3d',
'flux-disney',
'flux-pixel',
'any-dark',
]
models = [
*text_models,
*image_models
]
    model_aliases = {
        # Open source models. Each alias maps to exactly one canonical
        # model name; duplicate dict keys would silently overwrite each other.
        "llama-2-13b": "llama-2-13b-chat",
        "llama-3-70b": "llama-3-70b-chat",
        "llama-3-8b": "llama-3-8b-chat",
        "llama-3.1-405b": "llama-3.1-405b-turbo",
        "llama-3.1-70b": "llama-3.1-70b-turbo",
        "llama-3.1-8b": "llama-3.1-8b-turbo",
        "mixtral-8x7b": "Mixtral-8x7B-Instruct-v0.1",
        "mixtral-8x22b": "Mixtral-8x22B-Instruct-v0.1",
        "mistral-7b": "Mistral-7B-Instruct-v0.3",
        "mixtral-8x7b-dpo": "Nous-Hermes-2-Mixtral-8x7B-DPO",
        "qwen-1.5-72b": "Qwen1.5-72B-Chat",
        "qwen-1.5-110b": "Qwen1.5-110B-Chat",
        "qwen-2-72b": "Qwen2-72B-Instruct",
        "gemma-2b": "gemma-2b-it",
        "gemma-2b-9b": "gemma-2-9b-it",
        "gemma-2b-27b": "gemma-2-27b-it",
        "deepseek": "deepseek-llm-67b-chat",
        "yi-34b": "Nous-Hermes-2-Yi-34B",
        "wizardlm-2-8x22b": "WizardLM-2-8x22B",
        "solar-10-7b": "SOLAR-10.7B-Instruct-v1.0",
        "sh-n-7b": "StripedHyena-Nous-7B",
        "sparkdesk-v1.1": "sparkdesk",
        # Other models
        "gpt-4o": "chatgpt-4o-latest",
        "gpt-4o-mini": "gpt-4o-mini-2024-07-18",
        "gpt-3.5-turbo": "gpt-3.5-turbo-0125",
        "gemini-flash": "gemini-1.5-flash",
        "gemini-pro": "gemini-1.5-pro",
    }
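    # Resolution sketch (an assumption about ProviderModelMixin.get_model:
    # names already present in cls.models pass through unchanged, and keys
    # of cls.model_aliases map to their canonical target):
    #   Airforce.get_model("mixtral-8x7b")  -> "Mixtral-8x7B-Instruct-v0.1"
    #   Airforce.get_model("flux-realism")  -> "flux-realism"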
@classmethod
async def create_async_generator(
cls,
model: str,
messages: Messages,
proxy: str = None,
**kwargs
) -> AsyncResult:
model = cls.get_model(model)
headers = {
"accept": "*/*",
"accept-language": "en-US,en;q=0.9",
"content-type": "application/json",
"origin": "https://api.airforce",
"sec-ch-ua": '"Chromium";v="128", "Not(A:Brand";v="24"',
"sec-ch-ua-mobile": "?0",
"sec-ch-ua-platform": '"Linux"',
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "cross-site",
"user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
}
if model in cls.image_models:
async for item in cls.generate_image(model, messages, headers, proxy, **kwargs):
yield item
else:
async for item in cls.generate_text(model, messages, headers, proxy, **kwargs):
yield item
@classmethod
async def generate_text(cls, model: str, messages: Messages, headers: dict, proxy: str, **kwargs) -> AsyncResult:
async with ClientSession(headers=headers) as session:
data = {
"messages": [{"role": "user", "content": format_prompt(messages)}],
"model": model,
"temperature": kwargs.get('temperature', 1),
"top_p": kwargs.get('top_p', 1),
"stream": True
}
async with session.post(cls.text_api_endpoint, json=data, proxy=proxy) as response:
response.raise_for_status()
                async for line in response.content:
                    if line:
                        line = line.decode('utf-8').strip()
                        # Check the terminator before the "data: " branch:
                        # "data: [DONE]" also starts with "data: ", so testing
                        # it second would make the break unreachable.
                        if line == "data: [DONE]":
                            break
                        if line.startswith("data: "):
                            try:
                                data = json.loads(line[6:])
                                if data.get('choices'):
                                    delta = data['choices'][0].get('delta', {})
                                    if 'content' in delta:
                                        yield delta['content']
                            except json.JSONDecodeError:
                                continue
@classmethod
async def generate_image(cls, model: str, messages: Messages, headers: dict, proxy: str, **kwargs) -> AsyncResult:
prompt = messages[-1]['content'] if messages else ""
params = {
"prompt": prompt,
"size": kwargs.get("size", "1:1"),
"seed": kwargs.get("seed"),
"model": model
}
params = {k: v for k, v in params.items() if v is not None}
try:
async with ClientSession(headers=headers) as session:
async with session.get(cls.image_api_endpoint, params=params, proxy=proxy) as response:
response.raise_for_status()
content = await response.read()
if response.content_type.startswith('image/'):
image_url = str(response.url)
yield ImageResponse(image_url, prompt)
else:
try:
text = content.decode('utf-8', errors='ignore')
yield f"Error: {text}"
except Exception as decode_error:
yield f"Error: Unable to decode response - {str(decode_error)}"
except ClientResponseError as e:
yield f"Error: HTTP {e.status}: {e.message}"
except Exception as e:
yield f"Unexpected error: {str(e)}"

g4f/Provider/FluxAirforce.py (deleted)

@@ -1,82 +0,0 @@
from __future__ import annotations
from aiohttp import ClientSession, ClientResponseError
from urllib.parse import urlencode
import io
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..image import ImageResponse, is_accepted_format
class FluxAirforce(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://flux.api.airforce/"
api_endpoint = "https://api.airforce/v1/imagine2"
working = True
default_model = 'flux-realism'
models = [
'flux',
'flux-realism',
'flux-anime',
'flux-3d',
'flux-disney'
]
@classmethod
async def create_async_generator(
cls,
model: str,
messages: Messages,
proxy: str = None,
**kwargs
) -> AsyncResult:
headers = {
"accept": "*/*",
"accept-language": "en-US,en;q=0.9",
"origin": "https://flux.api.airforce",
"priority": "u=1, i",
"referer": "https://flux.api.airforce/",
"sec-ch-ua": '"Chromium";v="127", "Not)A;Brand";v="99"',
"sec-ch-ua-mobile": "?0",
"sec-ch-ua-platform": '"Linux"',
"sec-fetch-dest": "empty",
"sec-fetch-mode": "cors",
"sec-fetch-site": "same-site",
"user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36"
}
prompt = messages[-1]['content'] if messages else ""
params = {
"prompt": prompt,
"size": kwargs.get("size", "1:1"),
"seed": kwargs.get("seed"),
"model": model
}
params = {k: v for k, v in params.items() if v is not None}
try:
async with ClientSession(headers=headers) as session:
async with session.get(f"{cls.api_endpoint}", params=params, proxy=proxy) as response:
response.raise_for_status()
content = await response.read()
if response.content_type.startswith('image/'):
image_url = str(response.url)
yield ImageResponse(image_url, prompt)
else:
try:
text = content.decode('utf-8', errors='ignore')
yield f"Error: {text}"
except Exception as decode_error:
yield f"Error: Unable to decode response - {str(decode_error)}"
except ClientResponseError as e:
yield f"Error: HTTP {e.status}: {e.message}"
except Exception as e:
yield f"Unexpected error: {str(e)}"
finally:
if not session.closed:
await session.close()

g4f/Provider/Rocks.py (deleted)

@@ -1,70 +0,0 @@
import asyncio
import json
from aiohttp import ClientSession
from ..typing import Messages, AsyncResult
from .base_provider import AsyncGeneratorProvider
class Rocks(AsyncGeneratorProvider):
url = "https://api.airforce"
api_endpoint = "/chat/completions"
supports_message_history = True
supports_gpt_35_turbo = True
supports_gpt_4 = True
supports_stream = True
supports_system_message = True
working = True
@classmethod
async def create_async_generator(
cls,
model: str,
messages: Messages,
proxy: str = None,
**kwargs
) -> AsyncResult:
payload = {"messages":messages,"model":model,"max_tokens":4096,"temperature":1,"top_p":1,"stream":True}
headers = {
"Accept": "application/json",
"Accept-Encoding": "gzip, deflate, br, zstd",
"Accept-Language": "en-US,en;q=0.9",
"Authorization": "Bearer missing api key",
"Origin": "https://llmplayground.net",
"Referer": "https://llmplayground.net/",
"Sec-Fetch-Dest": "empty",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-origin",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36",
}
async with ClientSession() as session:
async with session.post(
f"{cls.url}{cls.api_endpoint}",
json=payload,
proxy=proxy,
headers=headers
) as response:
response.raise_for_status()
last_chunk_time = asyncio.get_event_loop().time()
async for line in response.content:
current_time = asyncio.get_event_loop().time()
if current_time - last_chunk_time > 5:
return
if line.startswith(b"\n"):
pass
elif "discord.com/invite/" in line.decode() or "discord.gg/" in line.decode():
pass # trolled
elif line.startswith(b"data: "):
try:
line = json.loads(line[6:])
except json.JSONDecodeError:
continue
chunk = line["choices"][0]["delta"].get("content")
if chunk:
yield chunk
last_chunk_time = current_time
else:
raise Exception(f"Unexpected line: {line}")
return
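
Note that the deleted inactivity guard above only fires once a late chunk finally arrives; a read that hangs forever is never interrupted. A sketch of a guard that actually bounds each read (hypothetical replacement, not part of this commit):

import asyncio

async def iter_with_inactivity_timeout(content, timeout: float = 5.0):
    """Yield chunks from an aiohttp stream, stopping once a read stalls."""
    iterator = content.__aiter__()
    while True:
        try:
            chunk = await asyncio.wait_for(iterator.__anext__(), timeout)
        except (asyncio.TimeoutError, StopAsyncIteration):
            return  # stalled or finished: end the stream quietly, as Rocks did
        yield chunk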

g4f/Provider/__init__.py

@@ -13,6 +13,7 @@ from .AI365VIP import AI365VIP
 from .Allyfy import Allyfy
 from .AiChatOnline import AiChatOnline
 from .AiChats import AiChats
+from .Airforce import Airforce
 from .Aura import Aura
 from .Bing import Bing
 from .BingCreateImages import BingCreateImages
@@ -28,7 +29,6 @@ from .DDG import DDG
 from .DeepInfra import DeepInfra
 from .DeepInfraImage import DeepInfraImage
 from .FlowGpt import FlowGpt
-from .FluxAirforce import FluxAirforce
 from .Free2GPT import Free2GPT
 from .FreeChatgpt import FreeChatgpt
 from .FreeGpt import FreeGpt
@@ -55,7 +55,6 @@ from .Reka import Reka
 from .Snova import Snova
 from .Replicate import Replicate
 from .ReplicateHome import ReplicateHome
-from .Rocks import Rocks
 from .TeachAnything import TeachAnything
 from .TwitterBio import TwitterBio
 from .Upstage import Upstage

g4f/models.py

@@ -5,6 +5,7 @@ from dataclasses import dataclass
 from .Provider import IterListProvider, ProviderType
 from .Provider import (
     AiChatOnline,
+    Airforce,
     Allyfy,
     Bing,
     Binjie,
@@ -18,7 +19,6 @@ from .Provider import (
     DDG,
     DeepInfra,
     DeepInfraImage,
-    FluxAirforce,
     Free2GPT,
     FreeChatgpt,
     FreeGpt,
@@ -105,7 +105,7 @@ gpt_35_turbo = Model(
     name = 'gpt-3.5-turbo',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Allyfy, TwitterBio, Nexra, Bixin123, CodeNews,
+        Allyfy, TwitterBio, Nexra, Bixin123, CodeNews, Airforce,
     ])
 )
@@ -114,7 +114,8 @@ gpt_4o = Model(
     name = 'gpt-4o',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Liaobots, Chatgpt4o, OpenaiChat,
+        Liaobots, Chatgpt4o, Airforce,
+        OpenaiChat,
     ])
 )
@@ -122,8 +123,8 @@ gpt_4o_mini = Model(
     name = 'gpt-4o-mini',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        DDG, Liaobots, You, FreeNetfly, Pizzagpt, ChatgptFree, AiChatOnline, CodeNews,
-        MagickPen, OpenaiChat, Koala,
+        DDG, Liaobots, You, FreeNetfly, Pizzagpt, ChatgptFree, AiChatOnline, CodeNews, MagickPen, Airforce,
+        OpenaiChat, Koala,
     ])
 )
@@ -131,7 +132,7 @@ gpt_4_turbo = Model(
     name = 'gpt-4-turbo',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Nexra, Bixin123, Liaobots, Bing
+        Nexra, Bixin123, Liaobots, Airforce, Bing
     ])
 )
@@ -139,7 +140,7 @@ gpt_4 = Model(
     name = 'gpt-4',
     base_provider = 'OpenAI',
     best_provider = IterListProvider([
-        Chatgpt4Online, Nexra, Binjie, Bing,
+        Chatgpt4Online, Nexra, Binjie, Airforce, Bing,
         gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider
     ])
 )
@@ -159,48 +160,60 @@ meta = Model(
     best_provider = MetaAI
 )
 
+llama_2_13b = Model(
+    name = "llama-2-13b",
+    base_provider = "Meta",
+    best_provider = IterListProvider([Airforce])
+)
+
 llama_3_8b = Model(
     name = "llama-3-8b",
     base_provider = "Meta",
-    best_provider = IterListProvider([DeepInfra, Replicate])
+    best_provider = IterListProvider([Airforce, DeepInfra, Replicate])
 )
 
 llama_3_70b = Model(
     name = "llama-3-70b",
     base_provider = "Meta",
-    best_provider = IterListProvider([ReplicateHome, DeepInfra, Replicate])
+    best_provider = IterListProvider([ReplicateHome, Airforce, DeepInfra, Replicate])
 )
 
 llama_3_1_8b = Model(
     name = "llama-3.1-8b",
     base_provider = "Meta",
-    best_provider = IterListProvider([Blackbox, PerplexityLabs])
+    best_provider = IterListProvider([Blackbox, Airforce, PerplexityLabs])
 )
 
 llama_3_1_70b = Model(
     name = "llama-3.1-70b",
     base_provider = "Meta",
-    best_provider = IterListProvider([DDG, HuggingChat, FreeGpt, Blackbox, TeachAnything, Free2GPT, HuggingFace, PerplexityLabs])
+    best_provider = IterListProvider([DDG, HuggingChat, FreeGpt, Blackbox, TeachAnything, Free2GPT, Airforce, HuggingFace, PerplexityLabs])
 )
 
 llama_3_1_405b = Model(
     name = "llama-3.1-405b",
     base_provider = "Meta",
-    best_provider = IterListProvider([Blackbox])
+    best_provider = IterListProvider([Blackbox, Airforce])
 )
 
 ### Mistral ###
-mixtral_8x7b = Model(
-    name = "mixtral-8x7b",
-    base_provider = "Mistral",
-    best_provider = IterListProvider([HuggingChat, DDG, ReplicateHome, TwitterBio, DeepInfra, HuggingFace,])
-)
-
 mistral_7b = Model(
     name = "mistral-7b",
     base_provider = "Mistral",
-    best_provider = IterListProvider([HuggingChat, HuggingFace, DeepInfra])
+    best_provider = IterListProvider([HuggingChat, Airforce, HuggingFace, DeepInfra])
+)
+
+mixtral_8x7b = Model(
+    name = "mixtral-8x7b",
+    base_provider = "Mistral",
+    best_provider = IterListProvider([HuggingChat, DDG, ReplicateHome, TwitterBio, Airforce, DeepInfra, HuggingFace])
+)
+
+mixtral_8x22b = Model(
+    name = "mixtral-8x22b",
+    base_provider = "Mistral",
+    best_provider = IterListProvider([Airforce])
 )
@@ -208,7 +221,13 @@ mistral_7b = Model(
 mixtral_8x7b_dpo = Model(
     name = "mixtral-8x7b-dpo",
     base_provider = "NousResearch",
-    best_provider = IterListProvider([HuggingChat, HuggingFace,])
+    best_provider = IterListProvider([HuggingChat, Airforce, HuggingFace])
+)
+
+yi_34b = Model(
+    name = 'yi-34b',
+    base_provider = 'NousResearch',
+    best_provider = IterListProvider([Airforce])
 )
@@ -222,29 +241,45 @@ phi_3_mini_4k = Model(
 ### Google ###
 # gemini
-gemini = Model(
-    name = 'gemini',
-    base_provider = 'Google',
-    best_provider = Gemini
-)
-
 gemini_pro = Model(
     name = 'gemini-pro',
     base_provider = 'Google',
-    best_provider = IterListProvider([GeminiPro, ChatGot, Liaobots])
+    best_provider = IterListProvider([GeminiPro, ChatGot, Liaobots, Airforce])
 )
 
 gemini_flash = Model(
     name = 'gemini-flash',
     base_provider = 'Google',
-    best_provider = IterListProvider([Liaobots, Blackbox])
+    best_provider = IterListProvider([Blackbox, Liaobots, Airforce])
 )
 
+gemini = Model(
+    name = 'gemini',
+    base_provider = 'Google',
+    best_provider = IterListProvider([
+        Gemini,
+        gemini_flash.best_provider, gemini_pro.best_provider
+    ])
+)
+
 # gemma
 gemma_2b = Model(
     name = 'gemma-2b',
     base_provider = 'Google',
-    best_provider = IterListProvider([ReplicateHome])
+    best_provider = IterListProvider([ReplicateHome, Airforce])
+)
+
+gemma_2b_9b = Model(
+    name = 'gemma-2b-9b',
+    base_provider = 'Google',
+    best_provider = IterListProvider([Airforce])
+)
+
+gemma_2b_27b = Model(
+    name = 'gemma-2b-27b',
+    base_provider = 'Google',
+    best_provider = IterListProvider([Airforce])
 )
 
 ### Anthropic ###
@@ -311,7 +346,7 @@ blackbox = Model(
 dbrx_instruct = Model(
     name = 'dbrx-instruct',
     base_provider = 'Databricks',
-    best_provider = IterListProvider([DeepInfra])
+    best_provider = IterListProvider([Airforce, DeepInfra])
 )
@@ -327,7 +362,7 @@ command_r_plus = Model(
 sparkdesk_v1_1 = Model(
     name = 'sparkdesk-v1.1',
     base_provider = 'iFlytek',
-    best_provider = IterListProvider([FreeChatgpt])
+    best_provider = IterListProvider([FreeChatgpt, Airforce])
 )
 
 ### Qwen ###
@@ -337,6 +372,24 @@ qwen_1_5_14b = Model(
     best_provider = IterListProvider([FreeChatgpt])
 )
 
+qwen_1_5_72b = Model(
+    name = 'qwen-1.5-72b',
+    base_provider = 'Qwen',
+    best_provider = IterListProvider([Airforce])
+)
+
+qwen_1_5_110b = Model(
+    name = 'qwen-1.5-110b',
+    base_provider = 'Qwen',
+    best_provider = IterListProvider([Airforce])
+)
+
+qwen_2_72b = Model(
+    name = 'qwen-2-72b',
+    base_provider = 'Qwen',
+    best_provider = IterListProvider([Airforce])
+)
+
 qwen_turbo = Model(
     name = 'qwen-turbo',
     base_provider = 'Qwen',
@@ -360,7 +413,10 @@ glm_4_9b = Model(
 glm_4 = Model(
     name = 'glm-4',
     base_provider = 'Zhipu AI',
-    best_provider = IterListProvider([CodeNews, glm_4_9b.best_provider,])
+    best_provider = IterListProvider([
+        CodeNews,
+        glm_3_6b.best_provider, glm_4_9b.best_provider
+    ])
 )
 
 ### 01-ai ###
@@ -370,14 +426,20 @@ yi_1_5_9b = Model(
     best_provider = IterListProvider([FreeChatgpt])
 )
 
-### Pi ###
+### Upstage ###
 solar_1_mini = Model(
     name = 'solar-1-mini',
     base_provider = 'Upstage',
     best_provider = IterListProvider([Upstage])
 )
 
+solar_10_7b = Model(
+    name = 'solar-10-7b',
+    base_provider = 'Upstage',
+    best_provider = Airforce
+)
+
 ### Pi ###
 pi = Model(
     name = 'pi',
@@ -410,10 +472,22 @@ westlake_7b_v2 = Model(
 deepseek = Model(
     name = 'deepseek',
     base_provider = 'DeepSeek',
-    best_provider = CodeNews
+    best_provider = IterListProvider([CodeNews, Airforce])
 )
 
+### WizardLM ###
+wizardlm_2_8x22b = Model(
+    name = 'wizardlm-2-8x22b',
+    base_provider = 'WizardLM',
+    best_provider = Airforce
+)
+
+### Together ###
+sh_n_7b = Model(
+    name = 'sh-n-7b',
+    base_provider = 'Together',
+    best_provider = Airforce
+)
+
 #############
 ### Image ###
@@ -446,35 +520,42 @@ playground_v2_5 = Model(
 flux = Model(
     name = 'flux',
     base_provider = 'Flux AI',
-    best_provider = IterListProvider([FluxAirforce])
+    best_provider = IterListProvider([Airforce])
 )
 
 flux_realism = Model(
     name = 'flux-realism',
     base_provider = 'Flux AI',
-    best_provider = IterListProvider([FluxAirforce])
+    best_provider = IterListProvider([Airforce])
 )
 
 flux_anime = Model(
     name = 'flux-anime',
     base_provider = 'Flux AI',
-    best_provider = IterListProvider([FluxAirforce])
+    best_provider = IterListProvider([Airforce])
 )
 
 flux_3d = Model(
     name = 'flux-3d',
     base_provider = 'Flux AI',
-    best_provider = IterListProvider([FluxAirforce])
+    best_provider = IterListProvider([Airforce])
 )
 
 flux_disney = Model(
     name = 'flux-disney',
     base_provider = 'Flux AI',
-    best_provider = IterListProvider([FluxAirforce])
+    best_provider = IterListProvider([Airforce])
+)
+
+flux_pixel = Model(
+    name = 'flux-pixel',
+    base_provider = 'Flux AI',
+    best_provider = IterListProvider([Airforce])
 )
@@ -537,6 +618,9 @@ class ModelUtils:
         ### Meta ###
         "meta-ai": meta,
 
+        # llama-2
+        'llama-2-13b': llama_2_13b,
+
         # llama-3
         'llama-3-8b': llama_3_8b,
         'llama-3-70b': llama_3_70b,
@@ -545,14 +629,19 @@ class ModelUtils:
         'llama-3.1-8b': llama_3_1_8b,
         'llama-3.1-70b': llama_3_1_70b,
         'llama-3.1-405b': llama_3_1_405b,
 
         ### Mistral ###
-        'mixtral-8x7b': mixtral_8x7b,
         'mistral-7b': mistral_7b,
+        'mixtral-8x7b': mixtral_8x7b,
+        'mixtral-8x22b': mixtral_8x22b,
 
         ### NousResearch ###
         'mixtral-8x7b-dpo': mixtral_8x7b_dpo,
+        'yi-34b': yi_34b,
 
         ### Microsoft ###
         'phi-3-mini-4k': phi_3_mini_4k,
@@ -566,6 +655,8 @@ class ModelUtils:
         # gemma
         'gemma-2b': gemma_2b,
+        'gemma-2b-9b': gemma_2b_9b,
+        'gemma-2b-27b': gemma_2b_27b,
 
         ### Anthropic ###
@@ -575,8 +666,8 @@ class ModelUtils:
         'claude-3-opus': claude_3_opus,
         'claude-3-sonnet': claude_3_sonnet,
-        'claude-3-5-sonnet': claude_3_5_sonnet,
         'claude-3-haiku': claude_3_haiku,
+        'claude-3-5-sonnet': claude_3_5_sonnet,
 
         ### Reka AI ###
@@ -605,6 +696,9 @@ class ModelUtils:
         ### Qwen ###
         'qwen-1.5-14b': qwen_1_5_14b,
+        'qwen-1.5-72b': qwen_1_5_72b,
+        'qwen-1.5-110b': qwen_1_5_110b,
+        'qwen-2-72b': qwen_2_72b,
         'qwen-turbo': qwen_turbo,
@@ -620,6 +714,7 @@ class ModelUtils:
         ### Upstage ###
         'solar-1-mini': solar_1_mini,
+        'solar-10-7b': solar_10_7b,
 
         ### Pi ###
@@ -640,6 +735,10 @@ class ModelUtils:
         ### DeepSeek ###
         'deepseek': deepseek,
 
+        ### Together ###
+        'sh-n-7b': sh_n_7b,
+
@@ -662,6 +761,7 @@ class ModelUtils:
         'flux-anime': flux_anime,
         'flux-3d': flux_3d,
         'flux-disney': flux_disney,
+        'flux-pixel': flux_pixel,
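
For context on the registry edits above: each ModelUtils entry maps a public model name to a Model object, and IterListProvider supplies the fallback behavior that lets Airforce slot in alongside existing providers. A simplified sketch of that contract (hypothetical names except IterListProvider itself; the real class may also handle provider shuffling and async streaming):

from typing import Any, Iterable, Optional

class IterListProviderSketch:
    """Try each wrapped provider in order; the first success wins."""

    def __init__(self, providers: Iterable[Any]):
        self.providers = list(providers)

    def create_completion(self, model: str, messages: list) -> str:
        last_error: Optional[Exception] = None
        for provider in self.providers:
            try:
                # After this commit, Airforce sits in these lists and is
                # attempted like any other provider.
                return provider.create_completion(model, messages)
            except Exception as error:
                last_error = error  # a failure just means "try the next one"
        raise last_error or RuntimeError("no provider available")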