Mirror of https://github.com/arc53/DocsGPT, synced 2024-11-17 21:26:26 +00:00.
Merge pull request #74 from arc53/fix-dbqa

Fix DBQA with a new chain type; also fix the doc export.

Commit ea41d6815b
@@ -5,8 +5,8 @@ import datetime
 from flask import Flask, request, render_template
 # os.environ["LANGCHAIN_HANDLER"] = "langchain"
 import faiss
-from langchain import OpenAI
-from langchain.chains import VectorDBQAWithSourcesChain
+from langchain import OpenAI, VectorDBQA
+from langchain.chains.question_answering import load_qa_chain
 from langchain.prompts import PromptTemplate
 import requests
 
@@ -69,11 +69,22 @@ def api_answer():
     c_prompt = PromptTemplate(input_variables=["summaries", "question"], template=template)
     # create a chain with the prompt template and the store
 
-    chain = VectorDBQAWithSourcesChain.from_llm(llm=OpenAI(openai_api_key=api_key, temperature=0), vectorstore=store, combine_prompt=c_prompt)
+    # chain = VectorDBQA.from_llm(llm=OpenAI(openai_api_key=api_key, temperature=0), vectorstore=store, combine_prompt=c_prompt)
+    # chain = VectorDBQA.from_chain_type(llm=OpenAI(openai_api_key=api_key, temperature=0), chain_type='map_reduce',
+    #                                    vectorstore=store)
+
+    qa_chain = load_qa_chain(OpenAI(openai_api_key=api_key, temperature=0), chain_type="map_reduce",
+                             combine_prompt=c_prompt)
+    chain = VectorDBQA(combine_documents_chain=qa_chain, vectorstore=store)
+
+
+
     # fetch the answer
-    result = chain({"question": question})
+    result = chain({"query": question})
+    print(result)
 
     # some formatting for the frontend
+    result['answer'] = result['result']
     result['answer'] = result['answer'].replace("\\n", "<br>")
     result['answer'] = result['answer'].replace("SOURCES:", "")
     # mock result
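For context, here is a standalone sketch of the new wiring this hunk introduces, using the same early-2023 LangChain calls that appear in the diff. The index/store file names and the prompt template come from elsewhere in this commit; the API key and the example question are placeholders, and loading the store this way is an assumption about how the app reads back its own export.

import pickle

import faiss
from langchain import OpenAI, VectorDBQA
from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate

# Load the exported vectorstore: the raw FAISS index plus the pickled wrapper.
index = faiss.read_index("docs.index")
with open("faiss_store.pkl", "rb") as f:
    store = pickle.load(f)
store.index = index

# Combine prompt for the map_reduce chain; the real template is app-defined.
template = "Summaries:\n{summaries}\n\nQuestion: {question}\nAnswer:"
c_prompt = PromptTemplate(input_variables=["summaries", "question"], template=template)

# Build the map_reduce QA chain, then hand it to VectorDBQA, which retrieves
# the relevant documents from the store before the chain combines them.
qa_chain = load_qa_chain(OpenAI(openai_api_key="sk-...", temperature=0),
                         chain_type="map_reduce", combine_prompt=c_prompt)
chain = VectorDBQA(combine_documents_chain=qa_chain, vectorstore=store)

# Note the input key changed from "question" to "query", and the raw answer
# now comes back under "result" (hence result['answer'] = result['result']).
result = chain({"query": "How do I run the app?"})
print(result["result"])

The switch from VectorDBQAWithSourcesChain to an explicit load_qa_chain plus VectorDBQA is what the commit message calls the "new chain type": map_reduce runs the question over each retrieved document separately and then combines the partial answers, instead of stuffing everything into one prompt.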
@@ -31,13 +31,20 @@ def call_openai_api(docs):
             print("Error on ", i)
             print("Saving progress")
             print(f"stopped at {c1} out of {len(docs)}")
-            store.save_local("outputs")
+            faiss.write_index(store.index, "docs.index")
+            store.index = None
+            with open("faiss_store.pkl", "wb") as f:
+                pickle.dump(store, f)
             print("Sleeping for 10 seconds and trying again")
             time.sleep(10)
             store.add_texts([i.page_content], metadatas=[i.metadata])
         c1 += 1
 
-    store.save_local("outputs")
+    faiss.write_index(store.index, "docs.index")
+    store.index = None
+    with open("faiss_store.pkl", "wb") as f:
+        pickle.dump(store, f)
+
 
 def get_user_permission(docs):
     # Function to ask user permission to call the OpenAI api and spend their OpenAI funds.
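This is the doc-export fix: the raw FAISS index is not picklable, so it is written out separately with faiss.write_index and the store wrapper is pickled with the index detached. A minimal round trip under those assumptions might look like this; the save side mirrors the diff, while load_store is an assumed counterpart (the function names and default paths are illustrative, not from the commit).

import pickle

import faiss

def save_store(store, index_path="docs.index", store_path="faiss_store.pkl"):
    # Write the raw FAISS index separately, since it cannot be pickled...
    faiss.write_index(store.index, index_path)
    # ...then pickle the wrapper with the index detached.
    store.index = None
    with open(store_path, "wb") as f:
        pickle.dump(store, f)

def load_store(index_path="docs.index", store_path="faiss_store.pkl"):
    # Reverse the export: unpickle the wrapper, then reattach its index.
    with open(store_path, "rb") as f:
        store = pickle.load(f)
    store.index = faiss.read_index(index_path)
    return store

One design consequence worth noting: after the mid-run save in the except branch, store.index is left as None in memory, so anything that touches the index afterwards (such as the retried add_texts) relies on it being reattached, as load_store does above.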