langchain/templates/self-query-qdrant/self_query_qdrant/chain.py

import os
from typing import List, Optional

from langchain.chains.query_constructor.schema import AttributeInfo
from langchain.retrievers import SelfQueryRetriever
from langchain.schema import Document, StrOutputParser
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.llms import BaseLLM
from langchain_community.llms.openai import OpenAI
from langchain_community.vectorstores.qdrant import Qdrant
from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel
from langchain_core.runnables import RunnableParallel, RunnablePassthrough
from qdrant_client import QdrantClient

from self_query_qdrant import defaults, helper, prompts
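

# ``Query`` is the input schema for the chain: a single natural-language question,
# wrapped in a pydantic (v1) root model.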
class Query(BaseModel):
    __root__: str


def create_chain(
    llm: Optional[BaseLLM] = None,
    embeddings: Optional[Embeddings] = None,
    document_contents: str = defaults.DEFAULT_DOCUMENT_CONTENTS,
    metadata_field_info: List[AttributeInfo] = defaults.DEFAULT_METADATA_FIELD_INFO,
    collection_name: str = defaults.DEFAULT_COLLECTION_NAME,
):
"""
Create a chain that can be used to query a Qdrant vector store with a self-querying
capability. By default, this chain will use the OpenAI LLM and OpenAIEmbeddings, and
work with the default document contents and metadata field info. You can override
these defaults by passing in your own values.
:param llm: an LLM to use for generating text
:param embeddings: an Embeddings to use for generating queries
:param document_contents: a description of the document set
:param metadata_field_info: list of metadata attributes
:param collection_name: name of the Qdrant collection to use
:return:
"""
    llm = llm or OpenAI()
    embeddings = embeddings or OpenAIEmbeddings()

    # Set up a vector store to store your vectors and metadata
    client = QdrantClient(
        url=os.environ.get("QDRANT_URL", "http://localhost:6333"),
        api_key=os.environ.get("QDRANT_API_KEY"),
    )
    vectorstore = Qdrant(
        client=client,
        collection_name=collection_name,
        embeddings=embeddings,
    )
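    # The collection is expected to exist already; ``initialize()`` below can create
    # and populate it with the default documents.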

    # Set up a retriever to query your vector store with self-querying capabilities
    retriever = SelfQueryRetriever.from_llm(
        llm, vectorstore, document_contents, metadata_field_info, verbose=True
    )
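
    # Build the LCEL pipeline: the retriever supplies the combined context documents
    # while the original question is passed through unchanged; both feed the prompt.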
    context = RunnableParallel(
        context=retriever | helper.combine_documents,
        query=RunnablePassthrough(),
    )
    pipeline = context | prompts.LLM_CONTEXT_PROMPT | llm | StrOutputParser()

    return pipeline.with_types(input_type=Query)
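

# Example: building a chain with non-default components (a sketch; the settings and
# collection name below are illustrative, not defaults of this template):
#
#     custom_chain = create_chain(
#         llm=OpenAI(temperature=0),
#         collection_name="my-collection",
#     )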


def initialize(
    embeddings: Optional[Embeddings] = None,
    collection_name: str = defaults.DEFAULT_COLLECTION_NAME,
    documents: List[Document] = defaults.DEFAULT_DOCUMENTS,
):
"""
Initialize a vector store with a set of documents. By default, the documents will be
compatible with the default metadata field info. You can override these defaults by
passing in your own values.
:param embeddings: an Embeddings to use for generating queries
:param collection_name: name of the Qdrant collection to use
:param documents: a list of documents to initialize the vector store with
:return:
"""
    embeddings = embeddings or OpenAIEmbeddings()

    # Store the documents in the same Qdrant instance that ``create_chain`` queries
    Qdrant.from_documents(
        documents,
        embedding=embeddings,
        collection_name=collection_name,
        url=os.environ.get("QDRANT_URL", "http://localhost:6333"),
        api_key=os.environ.get("QDRANT_API_KEY"),
    )


# Create the default chain
chain = create_chain()
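
# Example usage (a sketch; assumes OPENAI_API_KEY is set, QDRANT_URL / QDRANT_API_KEY
# point at a reachable Qdrant instance, and the question is purely illustrative):
#
#     initialize()  # create and fill the collection with the default documents
#     print(chain.invoke("What are these documents about?"))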