langchain/templates/llama2-functions/llama2_functions/chain.py


from langchain_community.llms import Replicate
from langchain_core.prompts import ChatPromptTemplate

# LLM: Llama 2 13B chat (GGUF) hosted on Replicate; langchain_community's
# Replicate wrapper reads the REPLICATE_API_TOKEN environment variable.
replicate_id = "andreasjansson/llama-2-13b-chat-gguf:60ec5dda9ff9ee0b6f786c9d1157842e6ab3cc931139ad98fe99e08a35c5d4d4"  # noqa: E501
model = Replicate(
    model=replicate_id,
    model_kwargs={"temperature": 0.8, "max_length": 500, "top_p": 0.95},
)

# Prompt with output schema specification
template = """You are an AI language model assistant. Your task is to generate 3 different versions of the given user /
question to retrieve relevant documents from a vector database. By generating multiple perspectives on the user /
question, your goal is to help the user overcome some of the limitations of distance-based similarity search. /
Respond with json that adheres to the following jsonschema:
{{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "properties": {{
    "question_1": {{
      "type": "string",
      "description": "First version of the user question."
    }},
    "question_2": {{
      "type": "string",
      "description": "Second version of the user question."
    }},
    "question_3": {{
      "type": "string",
      "description": "Third version of the user question."
    }}
  }},
  "required": ["question_1", "question_2", "question_3"],
  "additionalProperties": false
}}"""  # noqa: E501
prompt = ChatPromptTemplate.from_messages(
[("system", template), ("human", "{question}")]
)
# Chain
chain = prompt | model
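
# Usage sketch (illustrative, not part of the template itself): the chain
# takes a {"question": ...} input and returns the raw model string, which
# should be JSON matching the schema above. The example question and the
# json.loads parsing are assumptions; the model may not always emit valid
# JSON, hence the guard.
if __name__ == "__main__":
    import json

    output = chain.invoke({"question": "What are agents in LLM applications?"})
    try:
        parsed = json.loads(output)
        print(parsed["question_1"])
    except json.JSONDecodeError:
        print("Model did not return valid JSON:", output)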