templates: Update neo4j templates (#19789)

Tomaz Bratanic committed via GitHub
parent 765d6762bc
commit ed49cca191

@@ -1,13 +1,11 @@
 from pathlib import Path
 from typing import List

-from langchain.chains.openai_functions import create_structured_output_chain
-from langchain_community.chat_models import ChatOpenAI
 from langchain_community.document_loaders import TextLoader
-from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.graphs import Neo4jGraph
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_openai import ChatOpenAI, OpenAIEmbeddings
 from langchain_text_splitters import TokenTextSplitter
 from neo4j.exceptions import ClientError
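
Note: the chat model and embeddings now come from the dedicated langchain-openai partner package (added to pyproject.toml further down) instead of langchain_community. A minimal sketch of the new import style, assuming OPENAI_API_KEY is set in the environment:

from langchain_openai import ChatOpenAI, OpenAIEmbeddings

llm = ChatOpenAI(temperature=0)   # chat model, formerly langchain_community.chat_models
embeddings = OpenAIEmbeddings()   # embeddings, formerly langchain_community.embeddings.openai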
@@ -116,10 +114,10 @@ questions_prompt = ChatPromptTemplate.from_messages(
     ]
 )

-question_chain = create_structured_output_chain(Questions, llm, questions_prompt)
+question_chain = questions_prompt | llm.with_structured_output(Questions)

 for i, parent in enumerate(parent_documents):
-    questions = question_chain.run(parent.page_content).questions
+    questions = question_chain.invoke(parent.page_content).questions
     params = {
         "parent_id": i,
         "questions": [

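Note: the legacy create_structured_output_chain helper (and its .run method) is replaced by composing the prompt with llm.with_structured_output(...) and calling .invoke. A self-contained sketch of the pattern; the schema fields and prompt wording below are illustrative stand-ins for the template's own Questions model and questions_prompt:

from typing import List

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_openai import ChatOpenAI


class Questions(BaseModel):
    """Hypothetical questions that the given text can answer."""

    questions: List[str] = Field(..., description="Generated questions")


llm = ChatOpenAI(temperature=0)
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "Generate questions that are answered by the provided text."),
        ("human", "{input}"),
    ]
)

# Old: create_structured_output_chain(Questions, llm, prompt).run(text)
# New: pipe the prompt into the structured-output model and call .invoke(text)
question_chain = prompt | llm.with_structured_output(Questions)
result = question_chain.invoke("Paul Atreides is the heir of House Atreides.")
print(result.questions)
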
@@ -5,6 +5,6 @@ if __name__ == "__main__":
     print(  # noqa: T201
         chain.invoke(
             {"question": original_query},
-            {"configurable": {"strategy": "parent_document"}},
+            {"configurable": {"strategy": "parent_strategy"}},
         )
     )

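Note: the value passed under "configurable" must name one of the alternatives registered with configurable_alternatives on the retriever, so the call is updated to the key the chain actually registers (parent_strategy). A self-contained toy showing the mechanism, using prompt templates in place of the template's retrievers:

from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import ConfigurableField

base = PromptTemplate.from_template("typical_rag: {question}")
configurable = base.configurable_alternatives(
    ConfigurableField(id="strategy"),
    default_key="typical_rag",
    parent_strategy=PromptTemplate.from_template("parent_strategy: {question}"),
)

print(configurable.invoke({"question": "dune?"}))  # default alternative
print(
    configurable.invoke(
        {"question": "dune?"},
        {"configurable": {"strategy": "parent_strategy"}},  # selects the alternative
    )
)
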
@@ -1,10 +1,10 @@
 from operator import itemgetter

-from langchain_community.chat_models import ChatOpenAI
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables import ConfigurableField, RunnableParallel
+from langchain_openai import ChatOpenAI

 from neo4j_advanced_rag.retrievers import (
     hypothetic_question_vectorstore,
@@ -13,6 +13,11 @@ from neo4j_advanced_rag.retrievers import (
     typical_rag,
 )

+
+def format_docs(docs):
+    return "\n\n".join(doc.page_content for doc in docs)
+
+
 template = """Answer the question based only on the following context:
 {context}
@@ -33,7 +38,7 @@ retriever = typical_rag.as_retriever().configurable_alternatives(
 chain = (
     RunnableParallel(
         {
-            "context": itemgetter("question") | retriever,
+            "context": itemgetter("question") | retriever | format_docs,
             "question": itemgetter("question"),
         }
     )

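Note: a retriever returns a list of Document objects; the new format_docs step collapses them into one plain-text block before the prompt's {context} slot is filled, instead of interpolating the list's repr. Self-contained illustration (the sample documents are made up):

from langchain_core.documents import Document


def format_docs(docs):
    return "\n\n".join(doc.page_content for doc in docs)


docs = [
    Document(page_content="Dune is set on the desert planet Arrakis."),
    Document(page_content="House Atreides accepts stewardship of Arrakis."),
]
print(format_docs(docs))  # two paragraphs of plain text, ready for {context}
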
@@ -1,5 +1,5 @@
-from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.vectorstores import Neo4jVector
+from langchain_openai import OpenAIEmbeddings

 # Typical RAG retriever

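Note: in the retrievers module only the embeddings import moves to langchain-openai; the Neo4j vector-store wiring is unchanged. A sketch, assuming NEO4J_URI, NEO4J_USERNAME, NEO4J_PASSWORD and OPENAI_API_KEY are set and the index from the ingest step exists (the index name below is illustrative):

from langchain_community.vectorstores import Neo4jVector
from langchain_openai import OpenAIEmbeddings

typical_rag = Neo4jVector.from_existing_index(
    OpenAIEmbeddings(),
    index_name="typical_rag",
)
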
@ -748,17 +748,16 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.
[[package]]
name = "langchain-core"
version = "0.1.31"
version = "0.1.36"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_core-0.1.31-py3-none-any.whl", hash = "sha256:ff028f00db8ff03565b542cea81be27426022a72c6545b54d8de66fa00948ab3"},
{file = "langchain_core-0.1.31.tar.gz", hash = "sha256:d660cf209bb6ce61cb1c853107b091aaa809015a55dce9e0ce19b51d4c8f2a70"},
{file = "langchain_core-0.1.36-py3-none-any.whl", hash = "sha256:564beeb18ab13deca8daf6e6e74acab52e0b8f6202110262a4c914e4450febd2"},
{file = "langchain_core-0.1.36.tar.gz", hash = "sha256:aa2432370ca3d2a5d6dd14a810aa6488bf2f622ff7a0a3dc30f6e0ed9d7f5fa8"},
]
[package.dependencies]
anyio = ">=3,<5"
jsonpatch = ">=1.33,<2.0"
langsmith = ">=0.1.0,<0.2.0"
packaging = ">=23.2,<24.0"
@ -770,6 +769,22 @@ tenacity = ">=8.1.0,<9.0.0"
[package.extras]
extended-testing = ["jinja2 (>=3,<4)"]
[[package]]
name = "langchain-openai"
version = "0.1.1"
description = "An integration package connecting OpenAI and LangChain"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_openai-0.1.1-py3-none-any.whl", hash = "sha256:5cf4df5d2550af673337eafedaeec014ba52f9a25aeb8451206ca254bed01e5c"},
{file = "langchain_openai-0.1.1.tar.gz", hash = "sha256:d10e9a9fc4c8ea99ca98f23808ce44c7dcdd65354ac07ad10afe874ecf3401ca"},
]
[package.dependencies]
langchain-core = ">=0.1.33,<0.2.0"
openai = ">=1.10.0,<2.0.0"
tiktoken = ">=0.5.2,<1"
[[package]]
name = "langchain-text-splitters"
version = "0.0.1"
@ -1301,7 +1316,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@ -1922,4 +1936,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1,<4.0"
content-hash = "c124756e33358c955b0be370ad1b3e52db971fcb2e2dcf45375ee8413a0b9157"
content-hash = "bfe7292ce83c9ae2b12493f20c626c4273140c5bcda523b08632d709a844f1df"

@@ -14,6 +14,7 @@ tiktoken = "^0.5.1"
 openai = "<2"
 neo4j = "^5.14.0"
 langchain-text-splitters = ">=0.0.1,<0.1"
+langchain-openai = "^0.1.1"

 [tool.poetry.group.dev.dependencies]
 langchain-cli = ">=0.0.21"

@@ -1,10 +1,9 @@
-from langchain_community.chat_models import ChatOpenAI
-from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.vectorstores import Neo4jVector
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables import RunnableParallel, RunnablePassthrough
+from langchain_openai import ChatOpenAI, OpenAIEmbeddings

 retrieval_query = """
 MATCH (node)-[:HAS_PARENT]->(parent)
@@ -12,6 +11,11 @@ WITH parent, max(score) AS score // deduplicate parents
 RETURN parent.text AS text, score, {} AS metadata
 """

+
+def format_docs(docs):
+    return "\n\n".join(doc.page_content for doc in docs)
+
+
 vectorstore = Neo4jVector.from_existing_index(
     OpenAIEmbeddings(),
     index_name="retrieval",
@@ -31,7 +35,9 @@ prompt = ChatPromptTemplate.from_template(template)
 model = ChatOpenAI()

 chain = (
-    RunnableParallel({"context": retriever, "question": RunnablePassthrough()})
+    RunnableParallel(
+        {"context": retriever | format_docs, "question": RunnablePassthrough()}
+    )
     | prompt
     | model
     | StrOutputParser()

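Note: the RunnableParallel block builds the dict the prompt expects, with the retrieved parent documents flattened to text by format_docs and the raw question passed through untouched. A self-contained sketch with a stand-in retriever so it runs without a Neo4j instance:

from langchain_core.documents import Document
from langchain_core.runnables import (
    RunnableLambda,
    RunnableParallel,
    RunnablePassthrough,
)


def format_docs(docs):
    return "\n\n".join(doc.page_content for doc in docs)


# Stand-in for the Neo4jVector retriever defined above.
fake_retriever = RunnableLambda(
    lambda q: [Document(page_content=f"Parent chunk about: {q}")]
)

inputs = RunnableParallel(
    {"context": fake_retriever | format_docs, "question": RunnablePassthrough()}
)
print(inputs.invoke("Who is Paul Atreides?"))
# {'context': 'Parent chunk about: Who is Paul Atreides?',
#  'question': 'Who is Paul Atreides?'}
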
@ -748,17 +748,16 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.
[[package]]
name = "langchain-core"
version = "0.1.31"
version = "0.1.36"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_core-0.1.31-py3-none-any.whl", hash = "sha256:ff028f00db8ff03565b542cea81be27426022a72c6545b54d8de66fa00948ab3"},
{file = "langchain_core-0.1.31.tar.gz", hash = "sha256:d660cf209bb6ce61cb1c853107b091aaa809015a55dce9e0ce19b51d4c8f2a70"},
{file = "langchain_core-0.1.36-py3-none-any.whl", hash = "sha256:564beeb18ab13deca8daf6e6e74acab52e0b8f6202110262a4c914e4450febd2"},
{file = "langchain_core-0.1.36.tar.gz", hash = "sha256:aa2432370ca3d2a5d6dd14a810aa6488bf2f622ff7a0a3dc30f6e0ed9d7f5fa8"},
]
[package.dependencies]
anyio = ">=3,<5"
jsonpatch = ">=1.33,<2.0"
langsmith = ">=0.1.0,<0.2.0"
packaging = ">=23.2,<24.0"
@ -770,6 +769,22 @@ tenacity = ">=8.1.0,<9.0.0"
[package.extras]
extended-testing = ["jinja2 (>=3,<4)"]
[[package]]
name = "langchain-openai"
version = "0.1.1"
description = "An integration package connecting OpenAI and LangChain"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_openai-0.1.1-py3-none-any.whl", hash = "sha256:5cf4df5d2550af673337eafedaeec014ba52f9a25aeb8451206ca254bed01e5c"},
{file = "langchain_openai-0.1.1.tar.gz", hash = "sha256:d10e9a9fc4c8ea99ca98f23808ce44c7dcdd65354ac07ad10afe874ecf3401ca"},
]
[package.dependencies]
langchain-core = ">=0.1.33,<0.2.0"
openai = ">=1.10.0,<2.0.0"
tiktoken = ">=0.5.2,<1"
[[package]]
name = "langchain-text-splitters"
version = "0.0.1"
@ -1301,7 +1316,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@ -1922,4 +1936,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1,<4.0"
content-hash = "c124756e33358c955b0be370ad1b3e52db971fcb2e2dcf45375ee8413a0b9157"
content-hash = "bfe7292ce83c9ae2b12493f20c626c4273140c5bcda523b08632d709a844f1df"

@@ -14,6 +14,7 @@ tiktoken = "^0.5.1"
 openai = "<2"
 neo4j = "^5.14.0"
 langchain-text-splitters = ">=0.0.1,<0.1"
+langchain-openai = "^0.1.1"

 [tool.poetry.group.dev.dependencies]
 langchain-cli = ">=0.0.21"

@@ -1,8 +1,8 @@
 from pathlib import Path

 from langchain_community.document_loaders import TextLoader
-from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.vectorstores import Neo4jVector
+from langchain_openai import OpenAIEmbeddings
 from langchain_text_splitters import TokenTextSplitter

 txt_path = Path(__file__).parent / "dune.txt"

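Note: the ingest script loads dune.txt and does a parent/child token split before writing both levels to Neo4j (children linked via HAS_PARENT, as the retrieval query above shows). A sketch of that chunking step; the chunk sizes are illustrative rather than the template's exact values:

from pathlib import Path

from langchain_community.document_loaders import TextLoader
from langchain_text_splitters import TokenTextSplitter

docs = TextLoader(str(Path(__file__).parent / "dune.txt")).load()
parent_splitter = TokenTextSplitter(chunk_size=512, chunk_overlap=24)
child_splitter = TokenTextSplitter(chunk_size=100, chunk_overlap=24)

parent_documents = parent_splitter.split_documents(docs)
for parent in parent_documents:
    # Each child chunk is embedded and stored with a HAS_PARENT link to its parent.
    child_chunks = child_splitter.split_text(parent.page_content)
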
@@ -1,7 +1,5 @@
 from operator import itemgetter

-from langchain_community.chat_models import ChatOpenAI
-from langchain_community.embeddings import OpenAIEmbeddings
 from langchain_community.vectorstores import Neo4jVector
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import (
@@ -11,6 +9,7 @@ from langchain_core.prompts import (
 )
 from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables import RunnablePassthrough
+from langchain_openai import ChatOpenAI, OpenAIEmbeddings

 from neo4j_vector_memory.history import get_history, save_history

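Note: this template's chain combines vector retrieval with conversation history stored in Neo4j (get_history / save_history); only the OpenAI imports change here. An illustrative sketch of how a history-aware prompt pairs with the langchain-openai chat model; the prompt wording and variable names are assumptions, not the template's exact code:

from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_openai import ChatOpenAI

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "Answer using the retrieved context."),
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{question}"),
    ]
)
chain = prompt | ChatOpenAI(temperature=0)
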
@ -748,17 +748,16 @@ extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.
[[package]]
name = "langchain-core"
version = "0.1.31"
version = "0.1.36"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_core-0.1.31-py3-none-any.whl", hash = "sha256:ff028f00db8ff03565b542cea81be27426022a72c6545b54d8de66fa00948ab3"},
{file = "langchain_core-0.1.31.tar.gz", hash = "sha256:d660cf209bb6ce61cb1c853107b091aaa809015a55dce9e0ce19b51d4c8f2a70"},
{file = "langchain_core-0.1.36-py3-none-any.whl", hash = "sha256:564beeb18ab13deca8daf6e6e74acab52e0b8f6202110262a4c914e4450febd2"},
{file = "langchain_core-0.1.36.tar.gz", hash = "sha256:aa2432370ca3d2a5d6dd14a810aa6488bf2f622ff7a0a3dc30f6e0ed9d7f5fa8"},
]
[package.dependencies]
anyio = ">=3,<5"
jsonpatch = ">=1.33,<2.0"
langsmith = ">=0.1.0,<0.2.0"
packaging = ">=23.2,<24.0"
@ -770,6 +769,22 @@ tenacity = ">=8.1.0,<9.0.0"
[package.extras]
extended-testing = ["jinja2 (>=3,<4)"]
[[package]]
name = "langchain-openai"
version = "0.1.1"
description = "An integration package connecting OpenAI and LangChain"
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langchain_openai-0.1.1-py3-none-any.whl", hash = "sha256:5cf4df5d2550af673337eafedaeec014ba52f9a25aeb8451206ca254bed01e5c"},
{file = "langchain_openai-0.1.1.tar.gz", hash = "sha256:d10e9a9fc4c8ea99ca98f23808ce44c7dcdd65354ac07ad10afe874ecf3401ca"},
]
[package.dependencies]
langchain-core = ">=0.1.33,<0.2.0"
openai = ">=1.10.0,<2.0.0"
tiktoken = ">=0.5.2,<1"
[[package]]
name = "langchain-text-splitters"
version = "0.0.1"
@ -1301,7 +1316,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@ -1922,4 +1936,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1,<4.0"
content-hash = "c124756e33358c955b0be370ad1b3e52db971fcb2e2dcf45375ee8413a0b9157"
content-hash = "bfe7292ce83c9ae2b12493f20c626c4273140c5bcda523b08632d709a844f1df"

@@ -14,6 +14,7 @@ tiktoken = "^0.5.1"
 openai = "<2"
 neo4j = "^5.14.0"
 langchain-text-splitters = ">=0.0.1,<0.1"
+langchain-openai = "^0.1.1"

 [tool.poetry.group.dev.dependencies]
 langchain-cli = ">=0.0.21"
