refactor(g4f/Provider/HuggingChat.py, g4f/Provider/HuggingFace.py, g4f/models.py): update provider models and aliases

pull/2220/head
kqlio67 1 week ago
parent 2e3d60a5f1
commit abefd7f36f

g4f/Provider/HuggingChat.py
@@ -14,8 +14,7 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     models = [
         'meta-llama/Meta-Llama-3.1-70B-Instruct',
-        'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8',
-        'CohereForAI/c4ai-command-r-plus',
+        'CohereForAI/c4ai-command-r-plus-08-2024',
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
         '01-ai/Yi-1.5-34B-Chat',
@@ -25,8 +24,7 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
     model_aliases = {
         "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
-        "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
-        "command-r-plus": "CohereForAI/c4ai-command-r-plus",
+        "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
         "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",

g4f/Provider/HuggingFace.py
@@ -17,8 +17,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     models = [
         'meta-llama/Meta-Llama-3.1-70B-Instruct',
-        'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8',
-        'CohereForAI/c4ai-command-r-plus',
+        'CohereForAI/c4ai-command-r-plus-08-2024',
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
         '01-ai/Yi-1.5-34B-Chat',
@@ -28,8 +27,7 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     model_aliases = {
         "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
-        "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
-        "command-r-plus": "CohereForAI/c4ai-command-r-plus",
+        "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
         "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",

g4f/models.py
@@ -186,7 +186,7 @@ llama_3_1_70b = Model(
 llama_3_1_405b = Model(
     name = "llama-3.1-405b",
     base_provider = "Meta",
-    best_provider = IterListProvider([HuggingChat, Blackbox, HuggingFace])
+    best_provider = IterListProvider([Blackbox])
 )
 ### Mistral ###

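For downstream callers nothing changes syntactically: the short aliases keep working, but "command-r-plus" now resolves to the 08-2024 checkpoint, and "llama-3.1-405b" is no longer served through HuggingChat or HuggingFace (only Blackbox remains in its provider list). Below is a minimal usage sketch, not part of this commit, assuming g4f's OpenAI-style client interface as available around this release; the prompt text and variable names are illustrative.

# Usage sketch (illustrative, not part of this commit); assumes the g4f client API.
from g4f.client import Client
from g4f.Provider import HuggingChat

client = Client(provider=HuggingChat)

# The "command-r-plus" alias is resolved through HuggingChat.model_aliases,
# so after this commit the request targets CohereForAI/c4ai-command-r-plus-08-2024.
response = client.chat.completions.create(
    model="command-r-plus",
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
)
print(response.choices[0].message.content)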