[Community]: Added Template Format Parameter in create_chat_prompt for Langchain Prompty (#25739)

- **Description:** Added a `template_format` parameter to
`create_chat_prompt` so that `.prompty` files can render variables in
different template formats (`"f-string"`, `"mustache"`, or `"jinja2"`;
defaults to `"f-string"` for backward compatibility).
- **Issue:** #25703

---------

Co-authored-by: Chester Curme <chester.curme@gmail.com>
This commit is contained in:
Mohammad Mohtashim 2024-08-26 17:48:21 +05:00 committed by GitHub
parent 7205057c3e
commit dcf2278a05
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -1,4 +1,4 @@
from typing import Any, Dict
from typing import Any, Dict, Literal
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import Runnable, RunnableLambda
@ -7,7 +7,9 @@ from .utils import load, prepare
def create_chat_prompt(
path: str, input_name_agent_scratchpad: str = "agent_scratchpad"
path: str,
input_name_agent_scratchpad: str = "agent_scratchpad",
template_format: Literal["f-string", "mustache", "jinja2"] = "f-string",
) -> Runnable[Dict[str, Any], ChatPromptTemplate]:
"""Create a chat prompt from a Langchain schema."""
@ -23,9 +25,10 @@ def create_chat_prompt(
variable_name=input_name_agent_scratchpad, optional=True
) # type: ignore[arg-type]
)
lc_p = ChatPromptTemplate.from_messages(lc_messages)
lc_p = ChatPromptTemplate.from_messages(
lc_messages, template_format=template_format
)
lc_p = lc_p.partial(**p.inputs)
return lc_p
return RunnableLambda(runnable_chat_lambda)