from typing import Any, Dict

from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import Runnable, RunnableLambda

from .utils import load, prepare


def create_chat_prompt(
    path: str, input_name_agent_scratchpad: str = "agent_scratchpad"
) -> Runnable[Dict[str, Any], ChatPromptTemplate]:
    """Create a runnable that builds a ChatPromptTemplate from a prompt file.

    The runnable loads the prompt definition at ``path``, renders its messages
    with the inputs it receives, and appends an optional MessagesPlaceholder
    named ``input_name_agent_scratchpad`` so the result can carry intermediate
    agent steps.
    """

    def runnable_chat_lambda(inputs: Dict[str, Any]) -> ChatPromptTemplate:
        # Load the prompt definition and render its messages with the inputs.
        p = load(path)
        parsed = prepare(p, inputs)

        # Convert each rendered message into a (role, content) tuple.
        lc_messages = []
        for message in parsed:
            lc_messages.append((message["role"], message["content"]))

        # Append an optional placeholder for the agent scratchpad.
        lc_messages.append(
            MessagesPlaceholder(
                variable_name=input_name_agent_scratchpad, optional=True
            )  # type: ignore[arg-type]
        )

        # Build the chat prompt template and pre-fill the inputs declared in
        # the prompt definition as partial variables.
        lc_p = ChatPromptTemplate.from_messages(lc_messages)
        lc_p = lc_p.partial(**p.inputs)

        return lc_p

    return RunnableLambda(runnable_chat_lambda)
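

# Hedged usage sketch, not part of the original module: assuming this file ships
# inside the ``langchain_prompty`` package and that a Prompty file named
# "chat.prompty" exists with a ``question`` input, the returned runnable could be
# exercised as shown below. The import path, file name, and input key are all
# illustrative assumptions.
#
#   from langchain_prompty import create_chat_prompt
#
#   prompt = create_chat_prompt("chat.prompty")
#   template = prompt.invoke({"question": "What does this prompt do?"})
#   # ``template`` is a ChatPromptTemplate whose messages were rendered from the
#   # prompt file, with an empty optional "agent_scratchpad" slot; it can be
#   # formatted further or composed into a larger chain.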