diff --git a/libs/core/langchain_core/agents.py b/libs/core/langchain_core/agents.py index d13631365b..6ed6947e7e 100644 --- a/libs/core/langchain_core/agents.py +++ b/libs/core/langchain_core/agents.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from typing import Any, Literal, Sequence, Union +from typing import Any, List, Literal, Sequence, Union from langchain_core.load.serializable import Serializable from langchain_core.messages import ( @@ -40,6 +40,11 @@ class AgentAction(Serializable): """Return whether or not the class is serializable.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "agent"] + @property def messages(self) -> Sequence[BaseMessage]: """Return the messages that correspond to this action.""" @@ -98,6 +103,11 @@ class AgentFinish(Serializable): """Return whether or not the class is serializable.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "agent"] + @property def messages(self) -> Sequence[BaseMessage]: """Return the messages that correspond to this observation.""" diff --git a/libs/core/langchain_core/documents/base.py b/libs/core/langchain_core/documents/base.py index 40b8498e78..f33d7c82b0 100644 --- a/libs/core/langchain_core/documents/base.py +++ b/libs/core/langchain_core/documents/base.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Literal +from typing import List, Literal from langchain_core.load.serializable import Serializable from langchain_core.pydantic_v1 import Field @@ -21,3 +21,8 @@ class Document(Serializable): def is_lc_serializable(cls) -> bool: """Return whether this class is serializable.""" return True + + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "document"] diff 
--git a/libs/core/langchain_core/load/load.py b/libs/core/langchain_core/load/load.py index 2a0161ded2..a57a6dadab 100644 --- a/libs/core/langchain_core/load/load.py +++ b/libs/core/langchain_core/load/load.py @@ -3,6 +3,7 @@ import json import os from typing import Any, Dict, List, Optional +from langchain_core.load.mapping import SERIALIZABLE_MAPPING from langchain_core.load.serializable import Serializable DEFAULT_NAMESPACES = ["langchain", "langchain_core"] @@ -62,8 +63,21 @@ class Reviver: if len(namespace) == 1 and namespace[0] == "langchain": raise ValueError(f"Invalid namespace: {value}") - mod = importlib.import_module(".".join(namespace)) - cls = getattr(mod, name) + # Get the importable path + key = tuple(namespace + [name]) + if key not in SERIALIZABLE_MAPPING: + raise ValueError( + "Trying to deserialize something that cannot " + "be deserialized in current version of langchain-core: " + f"{key}" + ) + import_path = SERIALIZABLE_MAPPING[key] + # Split into module and name + import_dir, import_obj = import_path[:-1], import_path[-1] + # Import module + mod = importlib.import_module(".".join(import_dir)) + # Import class + cls = getattr(mod, import_obj) # The class must be a subclass of Serializable. 
if not issubclass(cls, Serializable): diff --git a/libs/core/langchain_core/load/mapping.py b/libs/core/langchain_core/load/mapping.py new file mode 100644 index 0000000000..923525f0fd --- /dev/null +++ b/libs/core/langchain_core/load/mapping.py @@ -0,0 +1,478 @@ +# First value is the value that it is serialized as +# Second value is the path to load it from +SERIALIZABLE_MAPPING = { + ("langchain", "schema", "messages", "AIMessage"): ( + "langchain_core", + "messages", + "ai", + "AIMessage", + ), + ("langchain", "schema", "messages", "AIMessageChunk"): ( + "langchain_core", + "messages", + "ai", + "AIMessageChunk", + ), + ("langchain", "schema", "messages", "BaseMessage"): ( + "langchain_core", + "messages", + "base", + "BaseMessage", + ), + ("langchain", "schema", "messages", "BaseMessageChunk"): ( + "langchain_core", + "messages", + "base", + "BaseMessageChunk", + ), + ("langchain", "schema", "messages", "ChatMessage"): ( + "langchain_core", + "messages", + "chat", + "ChatMessage", + ), + ("langchain", "schema", "messages", "FunctionMessage"): ( + "langchain_core", + "messages", + "function", + "FunctionMessage", + ), + ("langchain", "schema", "messages", "HumanMessage"): ( + "langchain_core", + "messages", + "human", + "HumanMessage", + ), + ("langchain", "schema", "messages", "SystemMessage"): ( + "langchain_core", + "messages", + "system", + "SystemMessage", + ), + ("langchain", "schema", "messages", "ToolMessage"): ( + "langchain_core", + "messages", + "tool", + "ToolMessage", + ), + ("langchain", "schema", "agent", "AgentAction"): ( + "langchain_core", + "agents", + "AgentAction", + ), + ("langchain", "schema", "agent", "AgentFinish"): ( + "langchain_core", + "agents", + "AgentFinish", + ), + ("langchain", "schema", "prompt_template", "BasePromptTemplate"): ( + "langchain_core", + "prompts", + "base", + "BasePromptTemplate", + ), + ("langchain", "chains", "llm", "LLMChain"): ( + "langchain", + "chains", + "llm", + "LLMChain", + ), + ("langchain", "prompts", 
"prompt", "PromptTemplate"): ( + "langchain_core", + "prompts", + "prompt", + "PromptTemplate", + ), + ("langchain", "prompts", "chat", "MessagesPlaceholder"): ( + "langchain_core", + "prompts", + "chat", + "MessagesPlaceholder", + ), + ("langchain", "llms", "openai", "OpenAI"): ( + "langchain", + "llms", + "openai", + "OpenAI", + ), + ("langchain", "prompts", "chat", "ChatPromptTemplate"): ( + "langchain_core", + "prompts", + "chat", + "ChatPromptTemplate", + ), + ("langchain", "prompts", "chat", "HumanMessagePromptTemplate"): ( + "langchain_core", + "prompts", + "chat", + "HumanMessagePromptTemplate", + ), + ("langchain", "prompts", "chat", "SystemMessagePromptTemplate"): ( + "langchain_core", + "prompts", + "chat", + "SystemMessagePromptTemplate", + ), + ("langchain", "schema", "agent", "AgentActionMessageLog"): ( + "langchain_core", + "agents", + "AgentActionMessageLog", + ), + ("langchain", "schema", "agent", "OpenAIToolAgentAction"): ( + "langchain", + "agents", + "output_parsers", + "openai_tools", + "OpenAIToolAgentAction", + ), + ("langchain", "prompts", "chat", "BaseMessagePromptTemplate"): ( + "langchain_core", + "prompts", + "chat", + "BaseMessagePromptTemplate", + ), + ("langchain", "schema", "output", "ChatGeneration"): ( + "langchain_core", + "outputs", + "chat_generation", + "ChatGeneration", + ), + ("langchain", "schema", "output", "Generation"): ( + "langchain_core", + "outputs", + "generation", + "Generation", + ), + ("langchain", "schema", "document", "Document"): ( + "langchain_core", + "documents", + "base", + "Document", + ), + ("langchain", "output_parsers", "fix", "OutputFixingParser"): ( + "langchain", + "output_parsers", + "fix", + "OutputFixingParser", + ), + ("langchain", "prompts", "chat", "AIMessagePromptTemplate"): ( + "langchain_core", + "prompts", + "chat", + "AIMessagePromptTemplate", + ), + ("langchain", "output_parsers", "regex", "RegexParser"): ( + "langchain", + "output_parsers", + "regex", + "RegexParser", + ), + 
("langchain", "schema", "runnable", "DynamicRunnable"): ( + "langchain_core", + "runnables", + "configurable", + "DynamicRunnable", + ), + ("langchain", "schema", "prompt", "PromptValue"): ( + "langchain_core", + "prompt_values", + "PromptValue", + ), + ("langchain", "schema", "runnable", "RunnableBinding"): ( + "langchain_core", + "runnables", + "base", + "RunnableBinding", + ), + ("langchain", "schema", "runnable", "RunnableBranch"): ( + "langchain_core", + "runnables", + "branch", + "RunnableBranch", + ), + ("langchain", "schema", "runnable", "RunnableWithFallbacks"): ( + "langchain_core", + "runnables", + "fallbacks", + "RunnableWithFallbacks", + ), + ("langchain", "schema", "output_parser", "StrOutputParser"): ( + "langchain_core", + "output_parsers", + "string", + "StrOutputParser", + ), + ("langchain", "chat_models", "openai", "ChatOpenAI"): ( + "langchain", + "chat_models", + "openai", + "ChatOpenAI", + ), + ("langchain", "output_parsers", "list", "CommaSeparatedListOutputParser"): ( + "langchain_core", + "output_parsers", + "list", + "CommaSeparatedListOutputParser", + ), + ("langchain", "schema", "runnable", "RunnableParallel"): ( + "langchain_core", + "runnables", + "base", + "RunnableParallel", + ), + ("langchain", "chat_models", "azure_openai", "AzureChatOpenAI"): ( + "langchain", + "chat_models", + "azure_openai", + "AzureChatOpenAI", + ), + ("langchain", "chat_models", "bedrock", "BedrockChat"): ( + "langchain", + "chat_models", + "bedrock", + "BedrockChat", + ), + ("langchain", "chat_models", "anthropic", "ChatAnthropic"): ( + "langchain", + "chat_models", + "anthropic", + "ChatAnthropic", + ), + ("langchain", "chat_models", "fireworks", "ChatFireworks"): ( + "langchain", + "chat_models", + "fireworks", + "ChatFireworks", + ), + ("langchain", "chat_models", "google_palm", "ChatGooglePalm"): ( + "langchain", + "chat_models", + "google_palm", + "ChatGooglePalm", + ), + ("langchain", "chat_models", "vertexai", "ChatVertexAI"): ( + "langchain", + 
"chat_models", + "vertexai", + "ChatVertexAI", + ), + ("langchain", "schema", "output", "ChatGenerationChunk"): ( + "langchain_core", + "outputs", + "chat_generation", + "ChatGenerationChunk", + ), + ("langchain", "schema", "messages", "ChatMessageChunk"): ( + "langchain_core", + "messages", + "chat", + "ChatMessageChunk", + ), + ("langchain", "schema", "messages", "HumanMessageChunk"): ( + "langchain_core", + "messages", + "human", + "HumanMessageChunk", + ), + ("langchain", "schema", "messages", "FunctionMessageChunk"): ( + "langchain_core", + "messages", + "function", + "FunctionMessageChunk", + ), + ("langchain", "schema", "messages", "SystemMessageChunk"): ( + "langchain_core", + "messages", + "system", + "SystemMessageChunk", + ), + ("langchain", "schema", "messages", "ToolMessageChunk"): ( + "langchain_core", + "messages", + "tool", + "ToolMessageChunk", + ), + ("langchain", "schema", "output", "GenerationChunk"): ( + "langchain_core", + "outputs", + "generation", + "GenerationChunk", + ), + ("langchain", "llms", "openai", "BaseOpenAI"): ( + "langchain", + "llms", + "openai", + "BaseOpenAI", + ), + ("langchain", "llms", "bedrock", "Bedrock"): ( + "langchain", + "llms", + "bedrock", + "Bedrock", + ), + ("langchain", "llms", "fireworks", "Fireworks"): ( + "langchain", + "llms", + "fireworks", + "Fireworks", + ), + ("langchain", "llms", "google_palm", "GooglePalm"): ( + "langchain", + "llms", + "google_palm", + "GooglePalm", + ), + ("langchain", "llms", "openai", "AzureOpenAI"): ( + "langchain", + "llms", + "openai", + "AzureOpenAI", + ), + ("langchain", "llms", "replicate", "Replicate"): ( + "langchain", + "llms", + "replicate", + "Replicate", + ), + ("langchain", "llms", "vertexai", "VertexAI"): ( + "langchain", + "llms", + "vertexai", + "VertexAI", + ), + ("langchain", "output_parsers", "combining", "CombiningOutputParser"): ( + "langchain", + "output_parsers", + "combining", + "CombiningOutputParser", + ), + ("langchain", "schema", "prompt_template", 
"BaseChatPromptTemplate"): ( + "langchain_core", + "prompts", + "chat", + "BaseChatPromptTemplate", + ), + ("langchain", "prompts", "chat", "ChatMessagePromptTemplate"): ( + "langchain_core", + "prompts", + "chat", + "ChatMessagePromptTemplate", + ), + ("langchain", "prompts", "few_shot_with_templates", "FewShotPromptWithTemplates"): ( + "langchain_core", + "prompts", + "few_shot_with_templates", + "FewShotPromptWithTemplates", + ), + ("langchain", "prompts", "pipeline", "PipelinePromptTemplate"): ( + "langchain_core", + "prompts", + "pipeline", + "PipelinePromptTemplate", + ), + ("langchain", "prompts", "base", "StringPromptTemplate"): ( + "langchain_core", + "prompts", + "string", + "StringPromptTemplate", + ), + ("langchain", "prompts", "base", "StringPromptValue"): ( + "langchain_core", + "prompt_values", + "StringPromptValue", + ), + ("langchain", "prompts", "chat", "BaseStringMessagePromptTemplate"): ( + "langchain_core", + "prompts", + "chat", + "BaseStringMessagePromptTemplate", + ), + ("langchain", "prompts", "chat", "ChatPromptValue"): ( + "langchain_core", + "prompt_values", + "ChatPromptValue", + ), + ("langchain", "prompts", "chat", "ChatPromptValueConcrete"): ( + "langchain_core", + "prompt_values", + "ChatPromptValueConcrete", + ), + ("langchain", "schema", "runnable", "HubRunnable"): ( + "langchain", + "runnables", + "hub", + "HubRunnable", + ), + ("langchain", "schema", "runnable", "RunnableBindingBase"): ( + "langchain_core", + "runnables", + "base", + "RunnableBindingBase", + ), + ("langchain", "schema", "runnable", "OpenAIFunctionsRouter"): ( + "langchain", + "runnables", + "openai_functions", + "OpenAIFunctionsRouter", + ), + ("langchain", "schema", "runnable", "RouterRunnable"): ( + "langchain_core", + "runnables", + "router", + "RouterRunnable", + ), + ("langchain", "schema", "runnable", "RunnablePassthrough"): ( + "langchain_core", + "runnables", + "passthrough", + "RunnablePassthrough", + ), + ("langchain", "schema", "runnable", 
"RunnableSequence"): ( + "langchain_core", + "runnables", + "base", + "RunnableSequence", + ), + ("langchain", "schema", "runnable", "RunnableEach"): ( + "langchain_core", + "runnables", + "base", + "RunnableEach", + ), + ("langchain", "schema", "runnable", "RunnableEachBase"): ( + "langchain_core", + "runnables", + "base", + "RunnableEachBase", + ), + ("langchain", "schema", "runnable", "RunnableConfigurableAlternatives"): ( + "langchain_core", + "runnables", + "configurable", + "RunnableConfigurableAlternatives", + ), + ("langchain", "schema", "runnable", "RunnableConfigurableFields"): ( + "langchain_core", + "runnables", + "configurable", + "RunnableConfigurableFields", + ), + ("langchain", "schema", "runnable", "RunnableWithMessageHistory"): ( + "langchain_core", + "runnables", + "history", + "RunnableWithMessageHistory", + ), + ("langchain", "schema", "runnable", "RunnableAssign"): ( + "langchain_core", + "runnables", + "passthrough", + "RunnableAssign", + ), + ("langchain", "schema", "runnable", "RunnableRetry"): ( + "langchain_core", + "runnables", + "retry", + "RunnableRetry", + ), +} diff --git a/libs/core/langchain_core/messages/ai.py b/libs/core/langchain_core/messages/ai.py index 66504fa74e..6c9633f02e 100644 --- a/libs/core/langchain_core/messages/ai.py +++ b/libs/core/langchain_core/messages/ai.py @@ -1,4 +1,4 @@ -from typing import Any, Literal +from typing import Any, List, Literal from langchain_core.messages.base import ( BaseMessage, @@ -17,6 +17,11 @@ class AIMessage(BaseMessage): type: Literal["ai"] = "ai" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + AIMessage.update_forward_refs() @@ -29,6 +34,11 @@ class AIMessageChunk(AIMessage, BaseMessageChunk): # non-chunk variant. 
type: Literal["AIMessageChunk"] = "AIMessageChunk" # type: ignore[assignment] # noqa: E501 + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore if isinstance(other, AIMessageChunk): if self.example != other.example: diff --git a/libs/core/langchain_core/messages/base.py b/libs/core/langchain_core/messages/base.py index dd9d81a57a..daed44f501 100644 --- a/libs/core/langchain_core/messages/base.py +++ b/libs/core/langchain_core/messages/base.py @@ -31,6 +31,11 @@ class BaseMessage(Serializable): """Return whether this class is serializable.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + def __add__(self, other: Any) -> ChatPromptTemplate: from langchain_core.prompts.chat import ChatPromptTemplate @@ -68,6 +73,11 @@ def merge_content( class BaseMessageChunk(BaseMessage): """A Message chunk, which can be concatenated with other Message chunks.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + def _merge_kwargs_dict( self, left: Dict[str, Any], right: Dict[str, Any] ) -> Dict[str, Any]: diff --git a/libs/core/langchain_core/messages/chat.py b/libs/core/langchain_core/messages/chat.py index 173453f30c..bd89094ca4 100644 --- a/libs/core/langchain_core/messages/chat.py +++ b/libs/core/langchain_core/messages/chat.py @@ -1,4 +1,4 @@ -from typing import Any, Literal +from typing import Any, List, Literal from langchain_core.messages.base import ( BaseMessage, @@ -15,6 +15,11 @@ class ChatMessage(BaseMessage): type: Literal["chat"] = "chat" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", 
"messages"] + ChatMessage.update_forward_refs() @@ -27,6 +32,11 @@ class ChatMessageChunk(ChatMessage, BaseMessageChunk): # non-chunk variant. type: Literal["ChatMessageChunk"] = "ChatMessageChunk" # type: ignore + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore if isinstance(other, ChatMessageChunk): if self.role != other.role: diff --git a/libs/core/langchain_core/messages/function.py b/libs/core/langchain_core/messages/function.py index a2e8f5bb82..32f89f99f1 100644 --- a/libs/core/langchain_core/messages/function.py +++ b/libs/core/langchain_core/messages/function.py @@ -1,4 +1,4 @@ -from typing import Any, Literal +from typing import Any, List, Literal from langchain_core.messages.base import ( BaseMessage, @@ -15,6 +15,11 @@ class FunctionMessage(BaseMessage): type: Literal["function"] = "function" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + FunctionMessage.update_forward_refs() @@ -27,6 +32,11 @@ class FunctionMessageChunk(FunctionMessage, BaseMessageChunk): # non-chunk variant. 
type: Literal["FunctionMessageChunk"] = "FunctionMessageChunk" # type: ignore[assignment] + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore if isinstance(other, FunctionMessageChunk): if self.name != other.name: diff --git a/libs/core/langchain_core/messages/human.py b/libs/core/langchain_core/messages/human.py index 90b3fcee89..976dc7afc5 100644 --- a/libs/core/langchain_core/messages/human.py +++ b/libs/core/langchain_core/messages/human.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import List, Literal from langchain_core.messages.base import BaseMessage, BaseMessageChunk @@ -13,6 +13,11 @@ class HumanMessage(BaseMessage): type: Literal["human"] = "human" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + HumanMessage.update_forward_refs() @@ -24,3 +29,8 @@ class HumanMessageChunk(HumanMessage, BaseMessageChunk): # to make sure that the chunk variant can be discriminated from the # non-chunk variant. 
type: Literal["HumanMessageChunk"] = "HumanMessageChunk" # type: ignore[assignment] # noqa: E501 + + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] diff --git a/libs/core/langchain_core/messages/system.py b/libs/core/langchain_core/messages/system.py index 0f3f0879e2..18e3753296 100644 --- a/libs/core/langchain_core/messages/system.py +++ b/libs/core/langchain_core/messages/system.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import List, Literal from langchain_core.messages.base import BaseMessage, BaseMessageChunk @@ -10,6 +10,11 @@ class SystemMessage(BaseMessage): type: Literal["system"] = "system" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + SystemMessage.update_forward_refs() @@ -21,3 +26,8 @@ class SystemMessageChunk(SystemMessage, BaseMessageChunk): # to make sure that the chunk variant can be discriminated from the # non-chunk variant. 
type: Literal["SystemMessageChunk"] = "SystemMessageChunk" # type: ignore[assignment] # noqa: E501 + + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] diff --git a/libs/core/langchain_core/messages/tool.py b/libs/core/langchain_core/messages/tool.py index 3c1a19532b..27d15739c7 100644 --- a/libs/core/langchain_core/messages/tool.py +++ b/libs/core/langchain_core/messages/tool.py @@ -1,4 +1,4 @@ -from typing import Any, Literal +from typing import Any, List, Literal from langchain_core.messages.base import ( BaseMessage, @@ -15,6 +15,11 @@ class ToolMessage(BaseMessage): type: Literal["tool"] = "tool" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + ToolMessage.update_forward_refs() @@ -27,6 +32,11 @@ class ToolMessageChunk(ToolMessage, BaseMessageChunk): # non-chunk variant. type: Literal["ToolMessageChunk"] = "ToolMessageChunk" # type: ignore[assignment] + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "messages"] + def __add__(self, other: Any) -> BaseMessageChunk: # type: ignore if isinstance(other, ToolMessageChunk): if self.tool_call_id != other.tool_call_id: diff --git a/libs/core/langchain_core/output_parsers/list.py b/libs/core/langchain_core/output_parsers/list.py index 9710ce82b9..1ad75b24bb 100644 --- a/libs/core/langchain_core/output_parsers/list.py +++ b/libs/core/langchain_core/output_parsers/list.py @@ -26,6 +26,11 @@ class CommaSeparatedListOutputParser(ListOutputParser): def is_lc_serializable(cls) -> bool: return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "output_parsers", "list"] + def get_format_instructions(self) -> str: return ( "Your response should be a 
list of comma separated values, " diff --git a/libs/core/langchain_core/output_parsers/string.py b/libs/core/langchain_core/output_parsers/string.py index 704b21472a..12350bf0d8 100644 --- a/libs/core/langchain_core/output_parsers/string.py +++ b/libs/core/langchain_core/output_parsers/string.py @@ -1,3 +1,5 @@ +from typing import List + from langchain_core.output_parsers.transform import BaseTransformOutputParser @@ -9,6 +11,11 @@ class StrOutputParser(BaseTransformOutputParser[str]): """Return whether this class is serializable.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "output_parser"] + @property def _type(self) -> str: """Return the output parser type for serialization.""" diff --git a/libs/core/langchain_core/outputs/chat_generation.py b/libs/core/langchain_core/outputs/chat_generation.py index a604996410..fa5041c348 100644 --- a/libs/core/langchain_core/outputs/chat_generation.py +++ b/libs/core/langchain_core/outputs/chat_generation.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, Literal +from typing import Any, Dict, List, Literal from langchain_core.messages import BaseMessage, BaseMessageChunk from langchain_core.outputs.generation import Generation @@ -27,6 +27,11 @@ class ChatGeneration(Generation): raise ValueError("Error while initializing ChatGeneration") from e return values + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "output"] + class ChatGenerationChunk(ChatGeneration): """A ChatGeneration chunk, which can be concatenated with other @@ -41,6 +46,11 @@ class ChatGenerationChunk(ChatGeneration): type: Literal["ChatGenerationChunk"] = "ChatGenerationChunk" # type: ignore[assignment] # noqa: E501 """Type is used exclusively for serialization purposes.""" + @classmethod + def get_lc_namespace(cls) -> 
List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "output"] + def __add__(self, other: ChatGenerationChunk) -> ChatGenerationChunk: if isinstance(other, ChatGenerationChunk): generation_info = ( diff --git a/libs/core/langchain_core/outputs/generation.py b/libs/core/langchain_core/outputs/generation.py index 67446acca4..3ede28f9fc 100644 --- a/libs/core/langchain_core/outputs/generation.py +++ b/libs/core/langchain_core/outputs/generation.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, Literal, Optional +from typing import Any, Dict, List, Literal, Optional from langchain_core.load import Serializable @@ -24,10 +24,20 @@ class Generation(Serializable): """Return whether this class is serializable.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "output"] + class GenerationChunk(Generation): """A Generation chunk, which can be concatenated with other Generation chunks.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "output"] + def __add__(self, other: GenerationChunk) -> GenerationChunk: if isinstance(other, GenerationChunk): generation_info = ( diff --git a/libs/core/langchain_core/prompt_values.py b/libs/core/langchain_core/prompt_values.py index 22e02e30ba..d0d1a10473 100644 --- a/libs/core/langchain_core/prompt_values.py +++ b/libs/core/langchain_core/prompt_values.py @@ -24,6 +24,11 @@ class PromptValue(Serializable, ABC): """Return whether this class is serializable.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "prompt"] + @abstractmethod def to_string(self) -> str: """Return prompt value as string.""" @@ -40,6 +45,11 @@ class StringPromptValue(PromptValue): """Prompt 
text.""" type: Literal["StringPromptValue"] = "StringPromptValue" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "base"] + def to_string(self) -> str: """Return prompt as string.""" return self.text @@ -66,6 +76,11 @@ class ChatPromptValue(PromptValue): """Return prompt as a list of messages.""" return list(self.messages) + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + class ChatPromptValueConcrete(ChatPromptValue): """Chat prompt value which explicitly lists out the message types it accepts. @@ -74,3 +89,8 @@ class ChatPromptValueConcrete(ChatPromptValue): messages: Sequence[AnyMessage] type: Literal["ChatPromptValueConcrete"] = "ChatPromptValueConcrete" + + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] diff --git a/libs/core/langchain_core/prompts/base.py b/libs/core/langchain_core/prompts/base.py index 21b31650ff..1cff41200f 100644 --- a/libs/core/langchain_core/prompts/base.py +++ b/libs/core/langchain_core/prompts/base.py @@ -44,6 +44,11 @@ class BasePromptTemplate(RunnableSerializable[Dict, PromptValue], ABC): default_factory=dict ) + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "prompt_template"] + @classmethod def is_lc_serializable(cls) -> bool: """Return whether this class is serializable.""" diff --git a/libs/core/langchain_core/prompts/chat.py b/libs/core/langchain_core/prompts/chat.py index fc10ef179c..2b24bf3d33 100644 --- a/libs/core/langchain_core/prompts/chat.py +++ b/libs/core/langchain_core/prompts/chat.py @@ -43,6 +43,11 @@ class BaseMessagePromptTemplate(Serializable, ABC): """Return whether or not the class is serializable.""" return True + @classmethod + 
def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + @abstractmethod def format_messages(self, **kwargs: Any) -> List[BaseMessage]: """Format messages from kwargs. Should return a list of BaseMessages. @@ -82,6 +87,11 @@ class MessagesPlaceholder(BaseMessagePromptTemplate): variable_name: str """Name of variable to use as messages.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + def __init__(self, variable_name: str, **kwargs: Any): return super().__init__(variable_name=variable_name, **kwargs) @@ -132,6 +142,11 @@ class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC): additional_kwargs: dict = Field(default_factory=dict) """Additional keyword arguments to pass to the prompt template.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + @classmethod def from_template( cls: Type[MessagePromptTemplateT], @@ -221,6 +236,11 @@ class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate): role: str """Role of the message.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + def format(self, **kwargs: Any) -> BaseMessage: """Format the prompt template. @@ -239,6 +259,11 @@ class ChatMessagePromptTemplate(BaseStringMessagePromptTemplate): class HumanMessagePromptTemplate(BaseStringMessagePromptTemplate): """Human message prompt template. This is a message sent from the user.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + def format(self, **kwargs: Any) -> BaseMessage: """Format the prompt template. 
@@ -255,6 +280,11 @@ class HumanMessagePromptTemplate(BaseStringMessagePromptTemplate): class AIMessagePromptTemplate(BaseStringMessagePromptTemplate): """AI message prompt template. This is a message sent from the AI.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + def format(self, **kwargs: Any) -> BaseMessage: """Format the prompt template. @@ -273,6 +303,11 @@ class SystemMessagePromptTemplate(BaseStringMessagePromptTemplate): This is a message that is not sent to the user. """ + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + def format(self, **kwargs: Any) -> BaseMessage: """Format the prompt template. @@ -368,6 +403,11 @@ class ChatPromptTemplate(BaseChatPromptTemplate): validate_template: bool = False """Whether or not to try validating the template.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "chat"] + def __add__(self, other: Any) -> ChatPromptTemplate: """Combine two prompt templates. 
diff --git a/libs/core/langchain_core/prompts/few_shot_with_templates.py b/libs/core/langchain_core/prompts/few_shot_with_templates.py index 18c9c7df63..f1b6fbc318 100644 --- a/libs/core/langchain_core/prompts/few_shot_with_templates.py +++ b/libs/core/langchain_core/prompts/few_shot_with_templates.py @@ -42,6 +42,11 @@ class FewShotPromptWithTemplates(StringPromptTemplate): validate_template: bool = False """Whether or not to try validating the template.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "few_shot_with_templates"] + @root_validator(pre=True) def check_examples_and_selector(cls, values: Dict) -> Dict: """Check that one and only one of examples/example_selector are provided.""" diff --git a/libs/core/langchain_core/prompts/pipeline.py b/libs/core/langchain_core/prompts/pipeline.py index 7cf13f6896..48f5ec4582 100644 --- a/libs/core/langchain_core/prompts/pipeline.py +++ b/libs/core/langchain_core/prompts/pipeline.py @@ -28,6 +28,11 @@ class PipelinePromptTemplate(BasePromptTemplate): pipeline_prompts: List[Tuple[str, BasePromptTemplate]] """A list of tuples, consisting of a string (`name`) and a Prompt Template.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "pipeline"] + @root_validator(pre=True) def get_input_variables(cls, values: Dict) -> Dict: """Get input variables.""" diff --git a/libs/core/langchain_core/prompts/prompt.py b/libs/core/langchain_core/prompts/prompt.py index 566cf02bd2..31bb7f30af 100644 --- a/libs/core/langchain_core/prompts/prompt.py +++ b/libs/core/langchain_core/prompts/prompt.py @@ -54,6 +54,11 @@ class PromptTemplate(StringPromptTemplate): "template_format": self.template_format, } + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "prompt"] + 
input_variables: List[str] """A list of the names of the variables the prompt template expects.""" diff --git a/libs/core/langchain_core/prompts/string.py b/libs/core/langchain_core/prompts/string.py index e454b8280a..5981fe9988 100644 --- a/libs/core/langchain_core/prompts/string.py +++ b/libs/core/langchain_core/prompts/string.py @@ -151,6 +151,11 @@ def get_template_variables(template: str, template_format: str) -> List[str]: class StringPromptTemplate(BasePromptTemplate, ABC): """String prompt that exposes the format method, returning a prompt.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "prompts", "base"] + def format_prompt(self, **kwargs: Any) -> PromptValue: """Create Chat Messages.""" return StringPromptValue(text=self.format(**kwargs)) diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py index de28dbb83f..ed90ca09f4 100644 --- a/libs/core/langchain_core/runnables/base.py +++ b/libs/core/langchain_core/runnables/base.py @@ -1349,6 +1349,11 @@ class RunnableSequence(RunnableSerializable[Input, Output]): last: Runnable[Any, Output] """The last runnable in the sequence.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] + @property def steps(self) -> List[Runnable[Any, Any]]: """All the runnables that make up the sequence in order.""" @@ -1358,10 +1363,6 @@ class RunnableSequence(RunnableSerializable[Input, Output]): def is_lc_serializable(cls) -> bool: return True - @classmethod - def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] - class Config: arbitrary_types_allowed = True @@ -1939,7 +1940,8 @@ class RunnableParallel(RunnableSerializable[Input, Dict[str, Any]]): @classmethod def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] + """Get the namespace of the 
langchain object.""" + return ["langchain", "schema", "runnable"] class Config: arbitrary_types_allowed = True @@ -2705,7 +2707,8 @@ class RunnableEachBase(RunnableSerializable[List[Input], List[Output]]): @classmethod def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] def _invoke( self, @@ -2746,6 +2749,11 @@ class RunnableEach(RunnableEachBase[Input, Output]): with each element of the input sequence. """ + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] + def bind(self, **kwargs: Any) -> RunnableEach[Input, Output]: return RunnableEach(bound=self.bound.bind(**kwargs)) @@ -2910,7 +2918,8 @@ class RunnableBindingBase(RunnableSerializable[Input, Output]): @classmethod def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] def _merge_configs(self, *configs: Optional[RunnableConfig]) -> RunnableConfig: config = merge_configs(self.config, *configs) @@ -3086,6 +3095,11 @@ class RunnableBinding(RunnableBindingBase[Input, Output]): runnable_binding.invoke('Say "Parrot-MAGIC"') # Should return `Parrot` """ + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] + def bind(self, **kwargs: Any) -> Runnable[Input, Output]: """Bind additional kwargs to a Runnable, returning a new Runnable. 
diff --git a/libs/core/langchain_core/runnables/branch.py b/libs/core/langchain_core/runnables/branch.py index 5f7c1b009d..a3255082ff 100644 --- a/libs/core/langchain_core/runnables/branch.py +++ b/libs/core/langchain_core/runnables/branch.py @@ -132,8 +132,8 @@ class RunnableBranch(RunnableSerializable[Input, Output]): @classmethod def get_lc_namespace(cls) -> List[str]: - """The namespace of a RunnableBranch is the namespace of its default branch.""" - return cls.__module__.split(".")[:-1] + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] def get_input_schema( self, config: Optional[RunnableConfig] = None diff --git a/libs/core/langchain_core/runnables/configurable.py b/libs/core/langchain_core/runnables/configurable.py index ef7f546bbc..005b013b3f 100644 --- a/libs/core/langchain_core/runnables/configurable.py +++ b/libs/core/langchain_core/runnables/configurable.py @@ -53,7 +53,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]): @classmethod def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] @property def InputType(self) -> Type[Input]: @@ -217,6 +218,11 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]): fields: Dict[str, AnyConfigurableField] + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] + @property def config_specs(self) -> List[ConfigurableFieldSpec]: return get_unique_config_specs( @@ -318,6 +324,11 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]): of the form <which.id>==<alternative key>, eg.
a key named "temperature" used by the alternative named "gpt3" becomes "model==gpt3/temperature".""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] + @property def config_specs(self) -> List[ConfigurableFieldSpec]: with _enums_for_spec_lock: diff --git a/libs/core/langchain_core/runnables/fallbacks.py b/libs/core/langchain_core/runnables/fallbacks.py index 999aa1ae21..5f6dbf11bf 100644 --- a/libs/core/langchain_core/runnables/fallbacks.py +++ b/libs/core/langchain_core/runnables/fallbacks.py @@ -125,7 +125,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]): @classmethod def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] @property def runnables(self) -> Iterator[Runnable[Input, Output]]: diff --git a/libs/core/langchain_core/runnables/history.py b/libs/core/langchain_core/runnables/history.py index 919d476fb4..98c6b1a091 100644 --- a/libs/core/langchain_core/runnables/history.py +++ b/libs/core/langchain_core/runnables/history.py @@ -86,6 +86,11 @@ class RunnableWithMessageHistory(RunnableBindingBase): output_messages_key: Optional[str] = None history_messages_key: Optional[str] = None + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] + def __init__( self, runnable: Runnable[ diff --git a/libs/core/langchain_core/runnables/passthrough.py b/libs/core/langchain_core/runnables/passthrough.py index 1b7242f28e..db446cd7d3 100644 --- a/libs/core/langchain_core/runnables/passthrough.py +++ b/libs/core/langchain_core/runnables/passthrough.py @@ -167,7 +167,8 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]): @classmethod def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] + """Get the 
namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] @property def InputType(self) -> Any: @@ -312,7 +313,8 @@ class RunnableAssign(RunnableSerializable[Dict[str, Any], Dict[str, Any]]): @classmethod def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] def get_input_schema( self, config: Optional[RunnableConfig] = None diff --git a/libs/core/langchain_core/runnables/retry.py b/libs/core/langchain_core/runnables/retry.py index 7aeb974648..f619c45f59 100644 --- a/libs/core/langchain_core/runnables/retry.py +++ b/libs/core/langchain_core/runnables/retry.py @@ -114,6 +114,11 @@ class RunnableRetry(RunnableBindingBase[Input, Output]): max_attempt_number: int = 3 """The maximum number of attempts to retry the runnable.""" + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] + @property def _kwargs_retrying(self) -> Dict[str, Any]: kwargs: Dict[str, Any] = dict() diff --git a/libs/core/langchain_core/runnables/router.py b/libs/core/langchain_core/runnables/router.py index 0413d8a911..eacdfb3781 100644 --- a/libs/core/langchain_core/runnables/router.py +++ b/libs/core/langchain_core/runnables/router.py @@ -77,7 +77,8 @@ class RouterRunnable(RunnableSerializable[RouterInput, Output]): @classmethod def get_lc_namespace(cls) -> List[str]: - return cls.__module__.split(".")[:-1] + """Get the namespace of the langchain object.""" + return ["langchain", "schema", "runnable"] def invoke( self, input: RouterInput, config: Optional[RunnableConfig] = None diff --git a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr index 725f917a99..7ae4a3d94a 100644 --- a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr +++ 
b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr @@ -5,8 +5,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -14,7 +15,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -25,7 +26,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -35,7 +36,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -53,7 +54,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -63,7 +64,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -103,7 +104,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "output_parsers", "list", "CommaSeparatedListOutputParser" @@ -120,8 +121,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -141,7 +143,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -152,7 +154,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -162,7 +164,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -180,7 +182,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -190,7 +192,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -229,7 +231,7 @@ "lc": 1, "type": "constructor", "id": [ - 
"langchain_core", + "langchain", "output_parsers", "list", "CommaSeparatedListOutputParser" @@ -246,8 +248,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -255,7 +258,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -266,7 +269,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -276,7 +279,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -294,7 +297,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -304,7 +307,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -343,7 +346,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "output_parsers", "list", "CommaSeparatedListOutputParser" @@ -365,7 +368,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -376,7 +379,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -386,7 +389,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -404,7 +407,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -414,7 +417,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -453,7 +456,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "output_parsers", "list", "CommaSeparatedListOutputParser" @@ -466,7 +469,7 @@ # --- # name: 
test_combining_sequences.3 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 
'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': 
['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 
'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ['foo', 'bar']}, outputs={'question': 'foobar'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:4'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 
0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'foobar'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nicer assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'foobar', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:5'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['baz, qux'], '_type': 'fake-list-chat-model', 'stop': None}, 
'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nicer assistant.\nHuman: foobar']}, outputs={'generations': [[{'text': 'baz, qux', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'baz, qux'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:6'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000007'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='baz, qux')}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:7'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': 
['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': 
{'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 
'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', 
end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': "RunnableLambda(lambda x: {'question': x[0] + x[1]})"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': ['foo', 'bar']}, outputs={'question': 'foobar'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:4'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nicer assistant.', 'template_format': 'f-string', 'partial_variables': 
{}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'foobar'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nicer assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'foobar', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:5'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000006'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['baz, qux'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['baz, qux'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nicer assistant.\nHuman: foobar']}, outputs={'generations': [[{'text': 'baz, qux', 'generation_info': None, 'type': 'ChatGeneration', 'message': 
{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'baz, qux'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:6'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000007'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='baz, qux')}, outputs={'output': ['baz', 'qux']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:7'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_each @@ -475,8 +478,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -484,7 +488,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -495,7 +499,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -505,7 +509,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -523,7 +527,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -533,7 +537,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -585,8 +589,9 @@ "lc": 1, 
"type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableEach" ], "kwargs": { @@ -614,8 +619,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -623,8 +629,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableParallel" ], "kwargs": { @@ -644,8 +651,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableParallel" ], "kwargs": { @@ -688,8 +696,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -697,8 +706,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableParallel" ], "kwargs": { @@ -721,8 +731,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableWithFallbacks" ], "kwargs": { @@ -730,8 +741,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -739,7 +751,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -772,8 +784,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -781,7 +794,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -833,8 +846,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableWithFallbacks" ], "kwargs": { @@ -885,8 +899,9 @@ "lc": 1, "type": "constructor", 
"id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableWithFallbacks" ], "kwargs": { @@ -955,8 +970,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -964,7 +980,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -975,7 +991,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -985,7 +1001,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1003,7 +1019,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -1013,7 +1029,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1053,7 +1069,7 @@ # --- # name: test_prompt_with_chat_model.2 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': 
{'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': AIMessage(content='foo')}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], 
inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'foo'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', 
end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': AIMessage(content='foo')}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': 
{'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, 
{'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_chat_model_and_parser @@ -1062,8 +1078,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -1071,7 +1088,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -1082,7 +1099,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -1092,7 +1109,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1110,7 +1127,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -1120,7 +1137,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1160,7 +1177,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "output_parsers", "list", "CommaSeparatedListOutputParser" @@ -1173,7 +1190,7 @@ # --- # name: test_prompt_with_chat_model_and_parser.1 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 
'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 
'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': 
"FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 
'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}], 'last': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 
'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo, bar'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo, bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo, bar', 'generation_info': None, 'type': 
'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo, bar'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='CommaSeparatedListOutputParser', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='parser', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'output_parsers', 'list', 'CommaSeparatedListOutputParser'], 'kwargs': {}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': AIMessage(content='foo, bar')}, outputs={'output': ['foo', 'bar']}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_chat_model_async @@ -1188,8 +1205,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -1197,7 +1215,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -1208,7 +1226,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -1218,7 +1236,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1236,7 +1254,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -1246,7 +1264,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", 
"prompts", "prompt", "PromptTemplate" @@ -1286,7 +1304,7 @@ # --- # name: test_prompt_with_chat_model_async.2 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': AIMessage(content='foo')}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 
1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo'], '_type': 
'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'ai', 'AIMessage'], 'kwargs': {'content': 'foo'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': 
'{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': AIMessage(content='foo')}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 
'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListChatModel', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo'], '_type': 'fake-list-chat-model', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'chat_model', 'FakeListChatModel'], 'repr': "FakeListChatModel(responses=['foo'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'ChatGeneration', 'message': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'AIMessage'], 'kwargs': {'content': 'foo'}}}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_llm @@ -1295,8 +1313,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -1304,7 +1323,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ 
-1315,7 +1334,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -1325,7 +1344,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1343,7 +1362,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -1353,7 +1372,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1393,13 +1412,13 @@ # --- # name: test_prompt_with_llm.1 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}}}, 
events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 
'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 
'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': 
FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_llm.2 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), 
extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'bar'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 
'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 2}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 
'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'bar', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), - Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your favorite color?'}, outputs={'output': 
'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your favorite color?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your favorite color?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=['seq:step:1'], 
execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 2}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your favorite color?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': 
{'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'bar'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 
1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 2}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'bar', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': 
['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your favorite color?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000004'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 
'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your favorite color?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your favorite color?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000005'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 2}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'], i=1)"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your favorite color?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 
'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000003'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_prompt_with_llm_and_async_lambda @@ -1408,8 +1427,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -1417,7 +1437,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -1428,7 +1448,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -1438,7 +1458,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1456,7 +1476,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -1466,7 +1486,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1519,7 +1539,7 @@ # --- # name: test_prompt_with_llm_and_async_lambda.1 list([ - Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'runnables', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 
'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}], 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=...)'}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': 
{'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'prompt_values', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'system', 'SystemMessage'], 'kwargs': {'content': 'You are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain_core', 'messages', 'human', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), 
Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='passthrough', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=...)'}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': 'foo'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[])]), + Run(id=UUID('00000000-0000-4000-8000-000000000000'), name='RunnableSequence', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'runnable', 'RunnableSequence'], 'kwargs': {'first': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, 'middle': [{'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 
'repr': "FakeListLLM(responses=['foo', 'bar'])"}], 'last': {'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=...)'}}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=None, tags=[], execution_order=None, child_execution_order=None, child_runs=[Run(id=UUID('00000000-0000-4000-8000-000000000001'), name='ChatPromptTemplate', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='prompt', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptTemplate'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'SystemMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': [], 'template': 'You are a nice assistant.', 'template_format': 'f-string', 'partial_variables': {}}}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'HumanMessagePromptTemplate'], 'kwargs': {'prompt': {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'prompt', 'PromptTemplate'], 'kwargs': {'input_variables': ['question'], 'template': '{question}', 'template_format': 'f-string', 'partial_variables': {}}}}}], 'input_variables': ['question']}}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'question': 'What is your name?'}, outputs={'lc': 1, 'type': 'constructor', 'id': ['langchain', 'prompts', 'chat', 'ChatPromptValue'], 'kwargs': {'messages': [{'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'SystemMessage'], 'kwargs': {'content': 'You 
are a nice assistant.', 'additional_kwargs': {}}}, {'lc': 1, 'type': 'constructor', 'id': ['langchain', 'schema', 'messages', 'HumanMessage'], 'kwargs': {'content': 'What is your name?', 'additional_kwargs': {}}}]}}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:1'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000002'), name='FakeListLLM', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='llm', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={'invocation_params': {'responses': ['foo', 'bar'], '_type': 'fake-list', 'stop': None}, 'options': {'stop': None}, 'batch_size': 1}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['tests', 'unit_tests', 'fake', 'llm', 'FakeListLLM'], 'repr': "FakeListLLM(responses=['foo', 'bar'])"}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'prompts': ['System: You are a nice assistant.\nHuman: What is your name?']}, outputs={'generations': [[{'text': 'foo', 'generation_info': None, 'type': 'Generation'}]], 'llm_output': None, 'run': None}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), tags=['seq:step:2'], execution_order=None, child_execution_order=None, child_runs=[]), Run(id=UUID('00000000-0000-4000-8000-000000000003'), name='passthrough', start_time=FakeDatetime(2023, 1, 1, 0, 0), run_type='chain', end_time=FakeDatetime(2023, 1, 1, 0, 0), extra={}, error=None, serialized={'lc': 1, 'type': 'not_implemented', 'id': ['langchain_core', 'runnables', 'base', 'RunnableLambda'], 'repr': 'RunnableLambda(afunc=...)'}, events=[{'name': 'start', 'time': FakeDatetime(2023, 1, 1, 0, 0)}, {'name': 'end', 'time': FakeDatetime(2023, 1, 1, 0, 0)}], inputs={'input': 'foo'}, outputs={'output': 'foo'}, reference_example_id=None, parent_run_id=UUID('00000000-0000-4000-8000-000000000000'), 
tags=['seq:step:3'], execution_order=None, child_execution_order=None, child_runs=[])]), ]) # --- # name: test_router_runnable @@ -1528,8 +1548,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -1537,8 +1558,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableParallel" ], "kwargs": { @@ -1558,8 +1580,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableParallel" ], "kwargs": { @@ -1585,8 +1608,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RouterRunnable" ], "kwargs": { @@ -1595,8 +1619,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -1604,7 +1629,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -1618,7 +1643,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -1628,7 +1653,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -1665,8 +1690,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -1674,7 +1700,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -1688,7 +1714,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -1698,7 +1724,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ 
-4300,8 +4326,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -4309,8 +4336,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableParallel" ], "kwargs": { @@ -4319,8 +4347,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -4328,8 +4357,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnablePassthrough" ], "kwargs": { @@ -4355,8 +4385,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -4404,7 +4435,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -4415,7 +4446,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -4425,7 +4456,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -4443,7 +4474,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -4453,7 +4484,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -4494,7 +4525,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "output_parsers", "list", "CommaSeparatedListOutputParser" @@ -4521,8 +4552,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -4530,7 +4562,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", 
"chat", "ChatPromptTemplate" @@ -4541,7 +4573,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -4551,7 +4583,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -4569,7 +4601,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -4579,7 +4611,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -4618,8 +4650,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableParallel" ], "kwargs": { @@ -4661,8 +4694,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableSequence" ], "kwargs": { @@ -4670,7 +4704,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -4681,7 +4715,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "SystemMessagePromptTemplate" @@ -4691,7 +4725,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -4709,7 +4743,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -4719,7 +4753,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -4758,8 +4792,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableParallel" ], "kwargs": { @@ -4768,8 +4803,9 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", - "runnables", + "langchain", + "schema", + "runnable", "RunnableBinding" ], "kwargs": { diff --git 
a/libs/core/tests/unit_tests/runnables/test_runnable.py b/libs/core/tests/unit_tests/runnables/test_runnable.py index bca88f7572..39b8d68b92 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable.py @@ -2029,7 +2029,7 @@ async def test_prompt_with_llm( ): del op["value"]["id"] - assert stream_log == [ + expected = [ RunLogPatch( { "op": "replace", @@ -2113,6 +2113,7 @@ async def test_prompt_with_llm( {"op": "replace", "path": "/final_output", "value": "foo"}, ), ] + assert stream_log == expected @freeze_time("2023-01-01") diff --git a/libs/langchain/langchain/chat_models/anthropic.py b/libs/langchain/langchain/chat_models/anthropic.py index 3e16835f1e..65832a026c 100644 --- a/libs/langchain/langchain/chat_models/anthropic.py +++ b/libs/langchain/langchain/chat_models/anthropic.py @@ -105,6 +105,11 @@ class ChatAnthropic(BaseChatModel, _AnthropicCommon): """Return whether this model can be serialized by Langchain.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "chat_models", "anthropic"] + def _convert_messages_to_prompt(self, messages: List[BaseMessage]) -> str: """Format a list of messages into a full prompt for the Anthropic model Args: diff --git a/libs/langchain/langchain/chat_models/azure_openai.py b/libs/langchain/langchain/chat_models/azure_openai.py index 2b9cb1b5f6..bf6e2c91f4 100644 --- a/libs/langchain/langchain/chat_models/azure_openai.py +++ b/libs/langchain/langchain/chat_models/azure_openai.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import os import warnings -from typing import Any, Callable, Dict, Union +from typing import Any, Callable, Dict, List, Union from langchain_core.outputs import ChatResult from langchain_core.pydantic_v1 import BaseModel, Field, root_validator @@ -94,6 +94,11 @@ class AzureChatOpenAI(ChatOpenAI): infer if it is a base_url or 
azure_endpoint and update accordingly. """ + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "chat_models", "azure_openai"] + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate that api key and python package exists in environment.""" diff --git a/libs/langchain/langchain/chat_models/bedrock.py b/libs/langchain/langchain/chat_models/bedrock.py index ac4e256e97..a98b667953 100644 --- a/libs/langchain/langchain/chat_models/bedrock.py +++ b/libs/langchain/langchain/chat_models/bedrock.py @@ -50,6 +50,11 @@ class BedrockChat(BaseChatModel, BedrockBase): """Return whether this model can be serialized by Langchain.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "chat_models", "bedrock"] + @property def lc_attributes(self) -> Dict[str, Any]: attributes: Dict[str, Any] = {} diff --git a/libs/langchain/langchain/chat_models/fireworks.py b/libs/langchain/langchain/chat_models/fireworks.py index 06e9dacdbc..2f1ba78bfc 100644 --- a/libs/langchain/langchain/chat_models/fireworks.py +++ b/libs/langchain/langchain/chat_models/fireworks.py @@ -101,6 +101,11 @@ class ChatFireworks(BaseChatModel): def is_lc_serializable(cls) -> bool: return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "chat_models", "fireworks"] + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate that api key in environment.""" diff --git a/libs/langchain/langchain/chat_models/google_palm.py b/libs/langchain/langchain/chat_models/google_palm.py index 52764b16a7..51e91e2ce3 100644 --- a/libs/langchain/langchain/chat_models/google_palm.py +++ b/libs/langchain/langchain/chat_models/google_palm.py @@ -256,6 +256,11 @@ class ChatGooglePalm(BaseChatModel, BaseModel): def 
is_lc_serializable(self) -> bool: return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "chat_models", "google_palm"] + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate api key, python package exists, temperature, top_p, and top_k.""" diff --git a/libs/langchain/langchain/chat_models/openai.py b/libs/langchain/langchain/chat_models/openai.py index 74c24066cb..8c6238019c 100644 --- a/libs/langchain/langchain/chat_models/openai.py +++ b/libs/langchain/langchain/chat_models/openai.py @@ -160,6 +160,11 @@ class ChatOpenAI(BaseChatModel): def lc_secrets(self) -> Dict[str, str]: return {"openai_api_key": "OPENAI_API_KEY"} + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "chat_models", "openai"] + @property def lc_attributes(self) -> Dict[str, Any]: attributes: Dict[str, Any] = {} diff --git a/libs/langchain/langchain/chat_models/vertexai.py b/libs/langchain/langchain/chat_models/vertexai.py index 2902945cc2..9b8993d76b 100644 --- a/libs/langchain/langchain/chat_models/vertexai.py +++ b/libs/langchain/langchain/chat_models/vertexai.py @@ -127,6 +127,11 @@ class ChatVertexAI(_VertexAICommon, BaseChatModel): def is_lc_serializable(self) -> bool: return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "chat_models", "vertexai"] + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate that the python package exists in environment.""" diff --git a/libs/langchain/langchain/llms/bedrock.py b/libs/langchain/langchain/llms/bedrock.py index 020847fcce..6b2fcf677c 100644 --- a/libs/langchain/langchain/llms/bedrock.py +++ b/libs/langchain/langchain/llms/bedrock.py @@ -357,6 +357,11 @@ class Bedrock(LLM, BedrockBase): """Return whether this model can be serialized by 
Langchain.""" return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "llms", "bedrock"] + @property def lc_attributes(self) -> Dict[str, Any]: attributes: Dict[str, Any] = {} diff --git a/libs/langchain/langchain/llms/fireworks.py b/libs/langchain/langchain/llms/fireworks.py index 7ba2be0fb7..6905ad0cbc 100644 --- a/libs/langchain/langchain/llms/fireworks.py +++ b/libs/langchain/langchain/llms/fireworks.py @@ -51,6 +51,11 @@ class Fireworks(BaseLLM): def is_lc_serializable(cls) -> bool: return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "llms", "fireworks"] + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate that api key in environment.""" diff --git a/libs/langchain/langchain/llms/google_palm.py b/libs/langchain/langchain/llms/google_palm.py index 16491df1f9..fe404368b2 100644 --- a/libs/langchain/langchain/llms/google_palm.py +++ b/libs/langchain/langchain/llms/google_palm.py @@ -75,6 +75,11 @@ class GooglePalm(BaseLLM, BaseModel): def is_lc_serializable(self) -> bool: return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "llms", "google_palm"] + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate api key, python package exists.""" diff --git a/libs/langchain/langchain/llms/openai.py b/libs/langchain/langchain/llms/openai.py index 9ee841f3d0..5835cdc50a 100644 --- a/libs/langchain/langchain/llms/openai.py +++ b/libs/langchain/langchain/llms/openai.py @@ -149,6 +149,11 @@ class BaseOpenAI(BaseLLM): def lc_secrets(self) -> Dict[str, str]: return {"openai_api_key": "OPENAI_API_KEY"} + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "llms", "openai"] + 
@property def lc_attributes(self) -> Dict[str, Any]: attributes: Dict[str, Any] = {} @@ -736,6 +741,11 @@ class OpenAI(BaseOpenAI): openai = OpenAI(model_name="text-davinci-003") """ + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "llms", "openai"] + @property def _invocation_params(self) -> Dict[str, Any]: return {**{"model": self.model_name}, **super()._invocation_params} @@ -794,6 +804,11 @@ class AzureOpenAI(BaseOpenAI): infer if it is a base_url or azure_endpoint and update accordingly. """ + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "llms", "openai"] + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate that api key and python package exists in environment.""" diff --git a/libs/langchain/langchain/llms/replicate.py b/libs/langchain/langchain/llms/replicate.py index 17dc7b42d2..5ed6e81889 100644 --- a/libs/langchain/langchain/llms/replicate.py +++ b/libs/langchain/langchain/llms/replicate.py @@ -70,6 +70,11 @@ class Replicate(LLM): def is_lc_serializable(cls) -> bool: return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "llms", "replicate"] + @root_validator(pre=True) def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]: """Build extra kwargs from additional params that were passed in.""" diff --git a/libs/langchain/langchain/llms/vertexai.py b/libs/langchain/langchain/llms/vertexai.py index 0dedeaf0df..e3038812fd 100644 --- a/libs/langchain/langchain/llms/vertexai.py +++ b/libs/langchain/langchain/llms/vertexai.py @@ -224,6 +224,11 @@ class VertexAI(_VertexAICommon, BaseLLM): def is_lc_serializable(self) -> bool: return True + @classmethod + def get_lc_namespace(cls) -> List[str]: + """Get the namespace of the langchain object.""" + return ["langchain", "llms", 
"vertexai"] + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate that the python package exists in environment.""" diff --git a/libs/langchain/langchain/output_parsers/combining.py b/libs/langchain/langchain/output_parsers/combining.py index 0a9e11b791..1222c2208d 100644 --- a/libs/langchain/langchain/output_parsers/combining.py +++ b/libs/langchain/langchain/output_parsers/combining.py @@ -9,12 +9,12 @@ from langchain_core.pydantic_v1 import root_validator class CombiningOutputParser(BaseOutputParser): """Combine multiple output parsers into one.""" + parsers: List[BaseOutputParser] + @classmethod def is_lc_serializable(cls) -> bool: return True - parsers: List[BaseOutputParser] - @root_validator() def validate_parsers(cls, values: Dict[str, Any]) -> Dict[str, Any]: """Validate the parsers.""" diff --git a/libs/langchain/tests/unit_tests/load/__snapshots__/test_dump.ambr b/libs/langchain/tests/unit_tests/load/__snapshots__/test_dump.ambr index 92427020e3..2cf2fdf8cd 100644 --- a/libs/langchain/tests/unit_tests/load/__snapshots__/test_dump.ambr +++ b/libs/langchain/tests/unit_tests/load/__snapshots__/test_dump.ambr @@ -97,7 +97,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -152,7 +152,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "ChatPromptTemplate" @@ -166,7 +166,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "chat", "HumanMessagePromptTemplate" @@ -176,7 +176,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" @@ -236,7 +236,7 @@ "lc": 1, "type": "constructor", "id": [ - "langchain_core", + "langchain", "prompts", "prompt", "PromptTemplate" diff --git a/libs/langchain/tests/unit_tests/load/test_serializable.py b/libs/langchain/tests/unit_tests/load/test_serializable.py new file mode 100644 index 
0000000000..9e356ae744 --- /dev/null +++ b/libs/langchain/tests/unit_tests/load/test_serializable.py @@ -0,0 +1,55 @@ +import importlib +import pkgutil + +from langchain_core.load.mapping import SERIALIZABLE_MAPPING + + +def import_all_modules(package_name: str) -> dict: + package = importlib.import_module(package_name) + classes: dict = {} + + for attribute_name in dir(package): + attribute = getattr(package, attribute_name) + if hasattr(attribute, "is_lc_serializable") and isinstance(attribute, type): + if ( + isinstance(attribute.is_lc_serializable(), bool) # type: ignore + and attribute.is_lc_serializable() # type: ignore + ): + key = tuple(attribute.lc_id()) # type: ignore + value = tuple(attribute.__module__.split(".") + [attribute.__name__]) + if key in classes and classes[key] != value: + raise ValueError + classes[key] = value + if hasattr(package, "__path__"): + for loader, module_name, is_pkg in pkgutil.walk_packages( + package.__path__, package_name + "." + ): + if module_name not in ( + "langchain.chains.llm_bash", + "langchain.chains.llm_symbolic_math", + "langchain.tools.python", + "langchain.vectorstores._pgvector_data_models", + ): + importlib.import_module(module_name) + new_classes = import_all_modules(module_name) + for k, v in new_classes.items(): + if k in classes and classes[k] != v: + raise ValueError + classes[k] = v + return classes + + +def test_serializable_mapping() -> None: + serializable_modules = import_all_modules("langchain") + missing = set(SERIALIZABLE_MAPPING).difference(serializable_modules) + assert missing == set() + extra = set(serializable_modules).difference(SERIALIZABLE_MAPPING) + assert extra == set() + + for k, import_path in serializable_modules.items(): + import_dir, import_obj = import_path[:-1], import_path[-1] + # Import module + mod = importlib.import_module(".".join(import_dir)) + # Import class + cls = getattr(mod, import_obj) + assert list(k) == cls.lc_id()