diff --git a/libs/community/langchain_community/chat_models/baichuan.py b/libs/community/langchain_community/chat_models/baichuan.py
index c03d5ca040..ea42650725 100644
--- a/libs/community/langchain_community/chat_models/baichuan.py
+++ b/libs/community/langchain_community/chat_models/baichuan.py
@@ -206,7 +206,7 @@ class ChatBaichuan(BaseChatModel):
 
     Key init args — client params:
         api_key: Optional[str]
-            MiniMax API key. If not passed in will be read from env var BAICHUAN_API_KEY.
+            Baichuan API key. If not passed in will be read from env var BAICHUAN_API_KEY.
         base_url: Optional[str]
             Base URL for API requests.
 
diff --git a/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py b/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py
index 2a9cc8cec2..2390c3fd91 100644
--- a/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py
+++ b/libs/community/langchain_community/chat_models/baidu_qianfan_endpoint.py
@@ -200,7 +200,7 @@ class QianfanChatEndpoint(BaseChatModel):
                 ("system", "你是一名专业的翻译家,可以将用户的中文翻译为英文。"),
                 ("human", "我喜欢编程。"),
             ]
-            qianfan_chat.invoke(message)
+            qianfan_chat.invoke(messages)
 
         .. code-block:: python
 
@@ -219,6 +219,7 @@ class QianfanChatEndpoint(BaseChatModel):
 
         .. code-block:: python
 
+            stream = chat.stream(messages)
             full = next(stream)
             for chunk in stream:
                 full += chunk
diff --git a/libs/community/langchain_community/chat_models/sparkllm.py b/libs/community/langchain_community/chat_models/sparkllm.py
index 75927d6153..dcc26a5357 100644
--- a/libs/community/langchain_community/chat_models/sparkllm.py
+++ b/libs/community/langchain_community/chat_models/sparkllm.py
@@ -126,9 +126,9 @@ class ChatSparkLLM(BaseChatModel):
 
             from langchain_community.chat_models import ChatSparkLLM
 
-            chat = MiniMaxChat(
-                api_key=api_key,
-                api_secret=ak,
+            chat = ChatSparkLLM(
+                api_key="your-api-key",
+                api_secret="your-api-secret",
                 model='Spark4.0 Ultra',
                 # temperature=...,
                 # other params...
diff --git a/libs/community/langchain_community/chat_models/zhipuai.py b/libs/community/langchain_community/chat_models/zhipuai.py
index 4496e36076..b551b21ff8 100644
--- a/libs/community/langchain_community/chat_models/zhipuai.py
+++ b/libs/community/langchain_community/chat_models/zhipuai.py
@@ -199,7 +199,7 @@ class ChatZhipuAI(BaseChatModel):
 
     Key init args — completion params:
         model: Optional[str]
-            Name of OpenAI model to use.
+            Name of ZhipuAI model to use.
         temperature: float
             Sampling temperature.
         max_tokens: Optional[int]
@@ -207,9 +207,9 @@ class ChatZhipuAI(BaseChatModel):
 
     Key init args — client params:
         api_key: Optional[str]
-            ZhipuAI API key. If not passed in will be read from env var ZHIPUAI_API_KEY.
+            ZhipuAI API key. If not passed in will be read from env var ZHIPUAI_API_KEY.
         api_base: Optional[str]
-            Base URL for API requests.
+            Base URL for API requests.
 
     See full list of supported init args and their descriptions in the params section.
 
@@ -255,7 +255,7 @@ class ChatZhipuAI(BaseChatModel):
 
         .. code-block:: python
 
-            stream = llm.stream(messages)
+            stream = zhipuai_chat.stream(messages)
             full = next(stream)
             for chunk in stream:
                 full += chunk