diff --git a/examples/vector_databases/qdrant/QA_with_Langchain_Qdrant_and_OpenAI.ipynb b/examples/vector_databases/qdrant/QA_with_Langchain_Qdrant_and_OpenAI.ipynb index 1618239c..dad6498f 100644 --- a/examples/vector_databases/qdrant/QA_with_Langchain_Qdrant_and_OpenAI.ipynb +++ b/examples/vector_databases/qdrant/QA_with_Langchain_Qdrant_and_OpenAI.ipynb @@ -9,13 +9,13 @@ "This notebook presents how to implement a Question Answering system with Langchain, Qdrant as a knowledge based and OpenAI embeddings. If you are not familiar with Qdrant, it's better to check out the [Getting_started_with_Qdrant_and_OpenAI.ipynb](Getting_started_with_Qdrant_and_OpenAI.ipynb) notebook.\n", "\n", "This notebook presents an end-to-end process of:\n", - "1. Using ra embeddings created by OpenAI API.\n", + "1. Calculating the embeddings with the OpenAI API.\n", "2. Storing the embeddings in a local instance of Qdrant to build a knowledge base.\n", "3. Converting raw text query to an embedding with OpenAI API.\n", "4. Using Qdrant to perform the nearest neighbour search in the created collection to find some context.\n", - "5. Asking LLM to find the answer in given context.\n", + "5. Asking the LLM to find the answer in a given context.\n", "\n", - "All the steps will be simplified to a calling some corresponding Langchain methods." ] }, { @@ -108,7 +108,7 @@ }, "outputs": [], "source": [ - "! pip install openai qdrant-client \"langchain==0.0.87\" wget" + "! pip install openai qdrant-client \"langchain==0.0.100\" wget" ] }, {