mirror of
https://github.com/hwchase17/langchain
synced 2024-11-16 06:13:16 +00:00
ed58eeb9c5
Moved the following modules to new package langchain-community in a backwards compatible fashion: ``` mv langchain/langchain/adapters community/langchain_community mv langchain/langchain/callbacks community/langchain_community/callbacks mv langchain/langchain/chat_loaders community/langchain_community mv langchain/langchain/chat_models community/langchain_community mv langchain/langchain/document_loaders community/langchain_community mv langchain/langchain/docstore community/langchain_community mv langchain/langchain/document_transformers community/langchain_community mv langchain/langchain/embeddings community/langchain_community mv langchain/langchain/graphs community/langchain_community mv langchain/langchain/llms community/langchain_community mv langchain/langchain/memory/chat_message_histories community/langchain_community mv langchain/langchain/retrievers community/langchain_community mv langchain/langchain/storage community/langchain_community mv langchain/langchain/tools community/langchain_community mv langchain/langchain/utilities community/langchain_community mv langchain/langchain/vectorstores community/langchain_community mv langchain/langchain/agents/agent_toolkits community/langchain_community mv langchain/langchain/cache.py community/langchain_community mv langchain/langchain/adapters community/langchain_community mv langchain/langchain/callbacks community/langchain_community/callbacks mv langchain/langchain/chat_loaders community/langchain_community mv langchain/langchain/chat_models community/langchain_community mv langchain/langchain/document_loaders community/langchain_community mv langchain/langchain/docstore community/langchain_community mv langchain/langchain/document_transformers community/langchain_community mv langchain/langchain/embeddings community/langchain_community mv langchain/langchain/graphs community/langchain_community mv langchain/langchain/llms community/langchain_community mv langchain/langchain/memory/chat_message_histories 
community/langchain_community mv langchain/langchain/retrievers community/langchain_community mv langchain/langchain/storage community/langchain_community mv langchain/langchain/tools community/langchain_community mv langchain/langchain/utilities community/langchain_community mv langchain/langchain/vectorstores community/langchain_community mv langchain/langchain/agents/agent_toolkits community/langchain_community mv langchain/langchain/cache.py community/langchain_community ``` Moved the following to core ``` mv langchain/langchain/utils/json_schema.py core/langchain_core/utils mv langchain/langchain/utils/html.py core/langchain_core/utils mv langchain/langchain/utils/strings.py core/langchain_core/utils cat langchain/langchain/utils/env.py >> core/langchain_core/utils/env.py rm langchain/langchain/utils/env.py ``` See .scripts/community_split/script_integrations.sh for all changes
96 lines
3.1 KiB
Python
"""Wrapper around Minimax chat models."""
|
|
import logging
from typing import Any, Dict, List, Optional, cast

from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.messages import (
    AIMessage,
    BaseMessage,
    HumanMessage,
)
from langchain_core.outputs import ChatGeneration, ChatResult

from langchain_community.llms.minimax import MinimaxCommon
from langchain_community.llms.utils import enforce_stop_tokens
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
def _parse_message(msg_type: str, text: str) -> Dict:
|
|
return {"sender_type": msg_type, "text": text}
|
|
|
|
|
|
def _parse_chat_history(history: List[BaseMessage]) -> List:
    """Convert LangChain messages into Minimax's ``sender_type``/``text`` dicts.

    Human messages map to the ``USER`` role and AI messages to ``BOT``.
    Any other message type (e.g. system messages) is silently skipped.
    """
    parsed = []
    for msg in history:
        body = cast(str, msg.content)
        if isinstance(msg, HumanMessage):
            parsed.append(_parse_message("USER", body))
        if isinstance(msg, AIMessage):
            parsed.append(_parse_message("BOT", body))
    return parsed
|
|
|
|
|
|
class MiniMaxChat(MinimaxCommon, BaseChatModel):
    """Wrapper around Minimax large language models.

    To use, you should have the environment variable ``MINIMAX_GROUP_ID`` and
    ``MINIMAX_API_KEY`` set with your API token, or pass it as a named parameter to
    the constructor.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import MiniMaxChat
            llm = MiniMaxChat(model_name="abab5-chat")

    """

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Generate next turn in the conversation.

        Args:
            messages: The history of the conversation as a list of messages. Code chat
                does not support context.
            stop: The list of stop words (optional).
            run_manager: The CallbackManager for LLM run, it's not used at the moment.

        Returns:
            The ChatResult that contains outputs generated by the model.

        Raises:
            ValueError: if no messages are provided.
        """
        if not messages:
            raise ValueError(
                "You should provide at least one message to start the chat!"
            )
        history = _parse_chat_history(messages)
        payload = self._default_params
        payload["messages"] = history
        text = self._client.post(payload)

        # The stop sequences are not enforced by the model's parameters, so
        # truncate the completion client-side when stop words are given.
        if stop is not None:
            text = enforce_stop_tokens(text, stop)

        # Bug fix: previously this method returned the raw string, violating
        # its declared ``ChatResult`` contract and breaking
        # ``BaseChatModel.generate`` callers that read ``.generations``.
        return ChatResult(
            generations=[ChatGeneration(message=AIMessage(content=text))]
        )

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Async generation is not implemented for the Minimax API."""
        raise NotImplementedError(
            """Minimax AI doesn't support async requests at the moment."""
        )
|