diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index 28e58768..fad44957 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -13,11 +13,10 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
     supports_stream = True
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     models = [
-        'meta-llama/Meta-Llama-3.1-70B-Instruct',
+        default_model,
         'CohereForAI/c4ai-command-r-plus-08-2024',
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
-        '01-ai/Yi-1.5-34B-Chat',
         'mistralai/Mistral-7B-Instruct-v0.3',
         'microsoft/Phi-3-mini-4k-instruct',
     ]
@@ -27,7 +26,6 @@ class HuggingChat(AbstractProvider, ProviderModelMixin):
         "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
         "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3",
         "phi-3-mini-4k": "microsoft/Phi-3-mini-4k-instruct",
     }
diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 46cfcca3..4fe02739 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -16,11 +16,10 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     supports_message_history = True
     default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
     models = [
-        'meta-llama/Meta-Llama-3.1-70B-Instruct',
+        default_model,
         'CohereForAI/c4ai-command-r-plus-08-2024',
         'mistralai/Mixtral-8x7B-Instruct-v0.1',
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
-        '01-ai/Yi-1.5-34B-Chat',
         'mistralai/Mistral-7B-Instruct-v0.3',
         'microsoft/Phi-3-mini-4k-instruct',
     ]
@@ -30,7 +29,6 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
         "command-r-plus": "CohereForAI/c4ai-command-r-plus-08-2024",
         "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
         "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
         "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3",
         "phi-3-mini-4k": "microsoft/Phi-3-mini-4k-instruct",
     }
diff --git a/g4f/models.py b/g4f/models.py
index 2414719b..44142094 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -202,13 +202,6 @@ mistral_7b = Model(
     best_provider = IterListProvider([HuggingChat, HuggingFace, DeepInfra])
 )
 
-### 01-ai ###
-yi_1_5_34b = Model(
-    name = "yi-1.5-34b",
-    base_provider = "01-ai",
-    best_provider = IterListProvider([HuggingChat, HuggingFace])
-)
-
 
 ### Microsoft ###
 phi_3_mini_4k = Model(
@@ -548,10 +541,6 @@ class ModelUtils:
 
 ### Mistral ###
 'mixtral-8x7b': mixtral_8x7b,
 'mistral-7b': mistral_7b,
-
-
-### 01-ai ###
-'yi-1.5-34b': yi_1_5_34b,
 
 ### Microsoft ###
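For reference, a minimal sketch of what this change means for model lookups. It assumes the alias table is the plain dict `ModelUtils.convert` as elsewhere in `g4f/models.py`; the dict name itself is not visible in the hunks above.

```python
# Sketch only; assumes ModelUtils.convert is the plain dict populated in g4f/models.py.
from g4f.models import ModelUtils

# Aliases that survive this change still resolve to Model instances.
mistral = ModelUtils.convert['mistral-7b']
print(mistral.name)  # -> mistral-7b

# The removed 01-ai entry no longer resolves; plain dict access raises KeyError.
try:
    ModelUtils.convert['yi-1.5-34b']
except KeyError:
    print("'yi-1.5-34b' has been dropped from ModelUtils.convert")
```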