docs: langchain-chroma package (#20394)

pull/20364/head^2
Erick Friis authored 6 months ago; committed by GitHub
parent 6786fa9186
commit 20f5cd7c95

@@ -42,7 +42,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet langchain langchain-openai deepeval"
"%pip install --upgrade --quiet langchain langchain-openai deepeval langchain-chroma"
]
},
{
@@ -215,8 +215,8 @@
"source": [
"import requests\n",
"from langchain.chains import RetrievalQA\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import TextLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import OpenAI, OpenAIEmbeddings\n",
"from langchain_text_splitters import CharacterTextSplitter\n",
"\n",

@@ -39,7 +39,7 @@
],
"source": [
"# Uncomment this to install psychicapi if you don't already have it installed\n",
"!poetry run pip -q install psychicapi"
"!poetry run pip -q install psychicapi langchain-chroma"
]
},
{
@@ -78,7 +78,7 @@
"outputs": [],
"source": [
"from langchain.chains import RetrievalQAWithSourcesChain\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAI, OpenAIEmbeddings\n",
"from langchain_text_splitters import CharacterTextSplitter"
]

@@ -5,7 +5,7 @@
## Installation and Setup
```bash
pip install chromadb
pip install langchain-chroma
```
@@ -15,7 +15,7 @@ There exists a wrapper around Chroma vector databases, allowing you to use it as
whether for semantic search or example selection.
```python
from langchain_community.vectorstores import Chroma
from langchain_chroma import Chroma
```
For a more detailed walkthrough of the Chroma wrapper, see [this notebook](/docs/integrations/vectorstores/chroma)
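To make the new import concrete, here is a minimal sketch of the wrapper in use; everything beyond the import line (the sample texts, the use of `OpenAIEmbeddings`, the query) is an illustrative assumption rather than part of this change.

```python
# Minimal sketch of the Chroma wrapper via the new package
# (assumes `pip install langchain-chroma langchain-openai` and an OpenAI API key).
from langchain_chroma import Chroma  # was: from langchain_community.vectorstores import Chroma
from langchain_openai import OpenAIEmbeddings

# Build an in-memory vector store from a couple of illustrative texts.
vectorstore = Chroma.from_texts(
    texts=["Chroma is a vector database", "LangChain wraps many vector stores"],
    embedding=OpenAIEmbeddings(),
)

# Semantic search over the indexed texts.
docs = vectorstore.similarity_search("What is Chroma?", k=1)
print(docs[0].page_content)
```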

@@ -28,12 +28,12 @@
" DocumentCompressorPipeline,\n",
" MergerRetriever,\n",
")\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_transformers import (\n",
" EmbeddingsClusteringFilter,\n",
" EmbeddingsRedundantFilter,\n",
")\n",
"from langchain_community.embeddings import HuggingFaceEmbeddings\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"# Get 3 diff embeddings.\n",

@@ -28,8 +28,8 @@
"import logging\n",
"\n",
"from langchain.retrievers import RePhraseQueryRetriever\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import WebBaseLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import ChatOpenAI, OpenAIEmbeddings\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter"
]
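The hunk above comes from the RePhraseQueryRetriever guide. A rough, self-contained sketch of how those imports fit together is below; the URL, chunk size, and query are illustrative, and the `from_llm` wiring is hedged from that guide rather than shown in this diff.

```python
from langchain.retrievers import RePhraseQueryRetriever
from langchain_chroma import Chroma
from langchain_community.document_loaders import WebBaseLoader
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

# Load a page, split it, and index the chunks in Chroma (URL is illustrative).
docs = WebBaseLoader("https://lilianweng.github.io/posts/2023-06-23-agent/").load()
splits = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0).split_documents(docs)
vectordb = Chroma.from_documents(documents=splits, embedding=OpenAIEmbeddings())

# Let an LLM re-phrase the raw user query before it reaches the retriever.
retriever = RePhraseQueryRetriever.from_llm(
    retriever=vectordb.as_retriever(), llm=ChatOpenAI(temperature=0)
)
retriever.invoke("Hi, I'm Lance. What are the approaches to task decomposition?")
```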

@@ -20,7 +20,7 @@
"## Creating a Chroma vector store\n",
"First we'll want to create a Chroma vector store and seed it with some data. We've created a small demo set of documents that contain summaries of movies.\n",
"\n",
"**Note:** The self-query retriever requires you to have `lark` installed (`pip install lark`). We also need the `chromadb` package."
"**Note:** The self-query retriever requires you to have `lark` installed (`pip install lark`). We also need the `langchain-chroma` package."
]
},
{
@@ -44,7 +44,7 @@
},
"outputs": [],
"source": [
"%pip install --upgrade --quiet chromadb"
"%pip install --upgrade --quiet langchain-chroma"
]
},
{
@@ -87,7 +87,7 @@
},
"outputs": [],
"source": [
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_core.documents import Document\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",

@@ -13,7 +13,7 @@
"Install Chroma with:\n",
"\n",
"```sh\n",
"pip install chromadb\n",
"pip install langchain-chroma\n",
"```\n",
"\n",
"Chroma runs in various modes. See below for examples of each integrated with LangChain.\n",
@@ -65,11 +65,11 @@
],
"source": [
"# import\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import TextLoader\n",
"from langchain_community.embeddings.sentence_transformer import (\n",
" SentenceTransformerEmbeddings,\n",
")\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_text_splitters import CharacterTextSplitter\n",
"\n",
"# load the document and split it into chunks\n",

@@ -43,7 +43,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install -qU chromadb langchain langchain-community langchain-openai"
"%pip install -qU langchain langchain-community langchain-openai langchain-chroma"
]
},
{
@@ -53,8 +53,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import TextLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter"
]

@@ -20,8 +20,8 @@
"outputs": [],
"source": [
"# Build a sample vectorDB\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import WebBaseLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter\n",
"\n",

@@ -21,7 +21,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet sentence-transformers > /dev/null"
"%pip install --upgrade --quiet sentence-transformers langchain-chroma langchain langchain-openai > /dev/null"
]
},
{
@@ -53,11 +53,11 @@
"source": [
"from langchain.chains import LLMChain, StuffDocumentsChain\n",
"from langchain.prompts import PromptTemplate\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_transformers import (\n",
" LongContextReorder,\n",
")\n",
"from langchain_community.embeddings import HuggingFaceEmbeddings\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import OpenAI\n",
"\n",
"# Get embeddings.\n",

@@ -37,8 +37,8 @@
"outputs": [],
"source": [
"from langchain.storage import InMemoryByteStore\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import TextLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter"
]

@@ -43,8 +43,8 @@
"outputs": [],
"source": [
"from langchain.storage import InMemoryStore\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import TextLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter"
]
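The `InMemoryStore` import above suggests the parent-document retriever guide; the sketch below is hedged from that guide (the file path, collection name, and chunk size are illustrative) and shows how the pieces combine with the new Chroma import.

```python
from langchain.retrievers import ParentDocumentRetriever
from langchain.storage import InMemoryStore
from langchain_chroma import Chroma
from langchain_community.document_loaders import TextLoader
from langchain_openai import OpenAIEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

docs = TextLoader("./state_of_the_union.txt").load()  # illustrative path

# Small chunks go into the vector store; the full parent documents go into the docstore.
retriever = ParentDocumentRetriever(
    vectorstore=Chroma(collection_name="full_documents", embedding_function=OpenAIEmbeddings()),
    docstore=InMemoryStore(),
    child_splitter=RecursiveCharacterTextSplitter(chunk_size=400),
)
retriever.add_documents(docs)

# Search matches the small chunks but returns the larger parent documents.
retrieved_docs = retriever.invoke("justice breyer")
```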

@@ -30,7 +30,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet lark chromadb"
"%pip install --upgrade --quiet lark langchain-chroma"
]
},
{
@@ -40,7 +40,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_core.documents import Document\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",

@@ -30,7 +30,7 @@ There are many great vector store options, here are a few that are free, open-so
This walkthrough uses the `chroma` vector database, which runs on your local machine as a library.
```bash
pip install chromadb
pip install langchain-chroma
```
We want to use OpenAIEmbeddings so we have to get the OpenAI API Key.
@@ -47,7 +47,7 @@ os.environ['OPENAI_API_KEY'] = getpass.getpass('OpenAI API Key:')
from langchain_community.document_loaders import TextLoader
from langchain_openai import OpenAIEmbeddings
from langchain_text_splitters import CharacterTextSplitter
from langchain_community.vectorstores import Chroma
from langchain_chroma import Chroma
# Load the document, split it into chunks, embed each chunk and load it into the vector store.
raw_documents = TextLoader('../../../state_of_the_union.txt').load()
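Continuing the snippet above with the new import, a short sketch of indexing and querying follows; the chunk sizes, the `k` value, and the query string are illustrative assumptions.

```python
from langchain_chroma import Chroma
from langchain_community.document_loaders import TextLoader
from langchain_openai import OpenAIEmbeddings
from langchain_text_splitters import CharacterTextSplitter

# Load the document, split it into chunks, embed each chunk and load it into the vector store.
raw_documents = TextLoader("../../../state_of_the_union.txt").load()
documents = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0).split_documents(raw_documents)
db = Chroma.from_documents(documents, OpenAIEmbeddings())

# Query the store directly...
docs = db.similarity_search("What did the president say about Ketanji Brown Jackson?")

# ...or expose it as a retriever for use in chains.
retriever = db.as_retriever(search_kwargs={"k": 4})
docs = retriever.invoke("What did the president say about Ketanji Brown Jackson?")
```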

@@ -17,7 +17,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"from langchain_text_splitters import CharacterTextSplitter"
]

@@ -19,7 +19,7 @@
"source": [
"from langchain.prompts import FewShotPromptTemplate, PromptTemplate\n",
"from langchain.prompts.example_selector import SemanticSimilarityExampleSelector\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"example_prompt = PromptTemplate(\n",

@@ -254,7 +254,7 @@
],
"source": [
"from langchain.prompts.example_selector import SemanticSimilarityExampleSelector\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"example_selector = SemanticSimilarityExampleSelector.from_examples(\n",

@@ -202,7 +202,7 @@
"outputs": [],
"source": [
"from langchain.prompts import SemanticSimilarityExampleSelector\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings"
]
},

@@ -64,7 +64,7 @@
}
],
"source": [
"%pip install --upgrade --quiet langchain langchain-openai\n",
"%pip install --upgrade --quiet langchain langchain-openai langchain-chroma\n",
"\n",
"# Set env var OPENAI_API_KEY or load from a .env file:\n",
"import dotenv\n",
@@ -391,7 +391,7 @@
}
],
"source": [
"%pip install --upgrade --quiet chromadb beautifulsoup4"
"%pip install --upgrade --quiet langchain-chroma beautifulsoup4"
]
},
{
@@ -445,7 +445,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"vectorstore = Chroma.from_documents(documents=all_splits, embedding=OpenAIEmbeddings())"

@@ -48,7 +48,7 @@
}
],
"source": [
"%pip install --upgrade --quiet langchain langchain-openai chromadb beautifulsoup4\n",
"%pip install --upgrade --quiet langchain langchain-openai langchain-chroma beautifulsoup4\n",
"\n",
"# Set env var OPENAI_API_KEY or load from a .env file:\n",
"import dotenv\n",
@@ -129,7 +129,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"vectorstore = Chroma.from_documents(documents=all_splits, embedding=OpenAIEmbeddings())"

@@ -45,7 +45,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet langchain-openai tiktoken chromadb langchain git\n",
"%pip install --upgrade --quiet langchain-openai tiktoken langchain-chroma langchain git\n",
"\n",
"# Set env var OPENAI_API_KEY or load from a .env file\n",
"# import dotenv\n",
@@ -201,7 +201,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"db = Chroma.from_documents(texts, OpenAIEmbeddings(disallowed_special=()))\n",

@@ -38,7 +38,7 @@
"metadata": {},
"outputs": [],
"source": [
"# %pip install -qU langchain langchain-community langchain-openai faker"
"# %pip install -qU langchain langchain-community langchain-openai faker langchain-chroma"
]
},
{
@@ -394,7 +394,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"embeddings = OpenAIEmbeddings(model=\"text-embedding-3-small\")\n",

@@ -36,7 +36,7 @@
"metadata": {},
"outputs": [],
"source": [
"# %pip install -qU langchain langchain-community langchain-openai chromadb"
"# %pip install -qU langchain langchain-community langchain-openai langchain-chroma"
]
},
{
@@ -84,7 +84,7 @@
"outputs": [],
"source": [
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"texts = [\"Harrison worked at Kensho\", \"Ankush worked at Facebook\"]\n",

@@ -36,7 +36,7 @@
"metadata": {},
"outputs": [],
"source": [
"# %pip install -qU langchain langchain-community langchain-openai chromadb"
"# %pip install -qU langchain langchain-community langchain-openai langchain-chroma"
]
},
{
@@ -84,7 +84,7 @@
"outputs": [],
"source": [
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"texts = [\"Harrison worked at Kensho\"]\n",

@@ -38,7 +38,7 @@
"metadata": {},
"outputs": [],
"source": [
"# %pip install -qU langchain langchain-community langchain-openai chromadb"
"# %pip install -qU langchain langchain-community langchain-openai langchain-chroma"
]
},
{
@@ -86,7 +86,7 @@
"outputs": [],
"source": [
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"\n",
"texts = [\"Harrison worked at Kensho\"]\n",

@@ -38,7 +38,7 @@
"metadata": {},
"outputs": [],
"source": [
"# %pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube chromadb"
"# %pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma"
]
},
{
@@ -249,7 +249,7 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.vectorstores import Chroma\n",
"from langchain_chroma import Chroma\n",
"from langchain_openai import OpenAIEmbeddings\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter\n",
"\n",

@@ -48,7 +48,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai chromadb bs4"
"%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai langchain-chroma bs4"
]
},
{
@@ -118,8 +118,8 @@
"source": [
"import bs4\n",
"from langchain import hub\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import WebBaseLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_core.output_parsers import StrOutputParser\n",
"from langchain_core.runnables import RunnablePassthrough\n",
"from langchain_openai import ChatOpenAI, OpenAIEmbeddings\n",
@@ -406,9 +406,9 @@
"from langchain import hub\n",
"from langchain.chains import create_history_aware_retriever, create_retrieval_chain\n",
"from langchain.chains.combine_documents import create_stuff_documents_chain\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.chat_message_histories import ChatMessageHistory\n",
"from langchain_community.document_loaders import WebBaseLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_core.chat_history import BaseChatMessageHistory\n",
"from langchain_core.output_parsers import StrOutputParser\n",
"from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",

@@ -27,7 +27,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet langchain langchain-community langchainhub gpt4all chromadb "
"%pip install --upgrade --quiet langchain langchain-community langchainhub gpt4all langchain-chroma "
]
},
{
@@ -72,8 +72,8 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_chroma import Chroma\n",
"from langchain_community.embeddings import GPT4AllEmbeddings\n",
"from langchain_community.vectorstores import Chroma\n",
"\n",
"vectorstore = Chroma.from_documents(documents=all_splits, embedding=GPT4AllEmbeddings())"
]

@@ -72,7 +72,7 @@ in this walkthrough, but everything shown here works with any
We'll use the following packages:
```python
%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai chromadb bs4
%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai langchain-chroma bs4
```
We need to set environment variable `OPENAI_API_KEY` for the embeddings model, which can be done
@@ -120,7 +120,7 @@ lines of code:
import bs4
from langchain import hub
from langchain_community.document_loaders import WebBaseLoader
from langchain_community.vectorstores import Chroma
from langchain_chroma import Chroma
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import OpenAIEmbeddings
@@ -350,7 +350,7 @@ vector store and
model.
```python
from langchain_community.vectorstores import Chroma
from langchain_chroma import Chroma
from langchain_openai import OpenAIEmbeddings
vectorstore = Chroma.from_documents(documents=all_splits, embedding=OpenAIEmbeddings())
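To show the migrated import in the context of the full quickstart, here is a hedged sketch of wiring the Chroma retriever into a RAG chain; `all_splits` is assumed to exist as in the walkthrough above, and the prompt wording and model name are illustrative.

```python
from langchain_chroma import Chroma
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

# `all_splits` comes from the loading/splitting steps earlier in the walkthrough.
vectorstore = Chroma.from_documents(documents=all_splits, embedding=OpenAIEmbeddings())
retriever = vectorstore.as_retriever()

prompt = ChatPromptTemplate.from_template(
    "Answer the question using only the following context:\n\n{context}\n\nQuestion: {question}"
)
llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)

def format_docs(docs):
    return "\n\n".join(doc.page_content for doc in docs)

rag_chain = (
    {"context": retriever | format_docs, "question": RunnablePassthrough()}
    | prompt
    | llm
    | StrOutputParser()
)
rag_chain.invoke("What is Task Decomposition?")
```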

@@ -43,7 +43,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai chromadb bs4"
"%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai langchain-chroma bs4"
]
},
{
@@ -113,8 +113,8 @@
"source": [
"import bs4\n",
"from langchain import hub\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import WebBaseLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_core.output_parsers import StrOutputParser\n",
"from langchain_core.runnables import RunnablePassthrough\n",
"from langchain_openai import ChatOpenAI, OpenAIEmbeddings\n",

@@ -43,7 +43,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai chromadb bs4"
"%pip install --upgrade --quiet langchain langchain-community langchainhub langchain-openai langchain-chroma bs4"
]
},
{
@@ -113,8 +113,8 @@
"source": [
"import bs4\n",
"from langchain import hub\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.document_loaders import WebBaseLoader\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_core.output_parsers import StrOutputParser\n",
"from langchain_core.runnables import RunnableParallel, RunnablePassthrough\n",
"from langchain_openai import ChatOpenAI, OpenAIEmbeddings\n",

@@ -480,8 +480,8 @@
"outputs": [],
"source": [
"from langchain.retrievers.web_research import WebResearchRetriever\n",
"from langchain_chroma import Chroma\n",
"from langchain_community.utilities import GoogleSearchAPIWrapper\n",
"from langchain_community.vectorstores import Chroma\n",
"from langchain_openai import ChatOpenAI, OpenAIEmbeddings"
]
},
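The last hunk touches the web research retriever guide. A heavily hedged sketch of how those imports are typically combined is below; the `from_llm` wiring and the required Google Search environment variables are recalled from that guide rather than shown in this diff, so treat the details as assumptions.

```python
import os

from langchain.retrievers.web_research import WebResearchRetriever
from langchain_chroma import Chroma
from langchain_community.utilities import GoogleSearchAPIWrapper
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

# GoogleSearchAPIWrapper expects these to be set (values omitted here).
assert os.environ.get("GOOGLE_API_KEY") and os.environ.get("GOOGLE_CSE_ID")

# A vector store for the fetched pages, an LLM to generate searches, and the search tool.
vectorstore = Chroma(embedding_function=OpenAIEmbeddings(), persist_directory="./chroma_db_web")
llm = ChatOpenAI(temperature=0)
search = GoogleSearchAPIWrapper()

# Assumed wiring per the guide: generate searches, fetch pages, index them, then retrieve.
web_retriever = WebResearchRetriever.from_llm(vectorstore=vectorstore, llm=llm, search=search)
docs = web_retriever.invoke("How do LLM-powered autonomous agents work?")
```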
