import os
import pickle
import dotenv
import datetime
from flask import Flask, request, render_template
# os.environ["LANGCHAIN_HANDLER"] = "langchain"
import faiss
from langchain import OpenAI, VectorDBQA
from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
import requests

# Redirect PosixPath to WindowsPath on Windows
import platform

if platform.system() == "Windows":
    import pathlib
    temp = pathlib.PosixPath
    pathlib.PosixPath = pathlib.WindowsPath

# loading the .env file
dotenv.load_dotenv()

with open("combine_prompt.txt", "r") as f:
    template = f.read()
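
# combine_prompt.txt is expected to provide {summaries} and {question}
# placeholders, matching the PromptTemplate input_variables defined below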

# check if OPENAI_API_KEY is set
if os.getenv("OPENAI_API_KEY") is not None:
    api_key_set = True
else:
    api_key_set = False
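
# api_key_set is passed to the template and used in /api/answer to decide
# whether the OpenAI key comes from the request body or from the environment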

app = Flask(__name__)


@app.route("/")
def home():
    return render_template("index.html", api_key_set=api_key_set)
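
# /api/answer expects a JSON body with "question", plus an optional "api_key"
# (used when OPENAI_API_KEY is not set) and an optional "active_docs" folder name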


@app.route("/api/answer", methods=["POST"])
def api_answer():
    data = request.get_json()
    question = data["question"]
    if not api_key_set:
        api_key = data["api_key"]
    else:
        api_key = os.getenv("OPENAI_API_KEY")

    # check if the vectorstore is set
    if "active_docs" in data:
        vectorstore = "vectors/" + data["active_docs"]
        if data["active_docs"] == "default":
            vectorstore = ""
    else:
        vectorstore = ""

    # load the index, the store and the prompt template
    index = faiss.read_index(f"{vectorstore}docs.index")

    with open(f"{vectorstore}faiss_store.pkl", "rb") as f:
        store = pickle.load(f)

    store.index = index

    # create a prompt template
    c_prompt = PromptTemplate(input_variables=["summaries", "question"], template=template)
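
    # note: the map_reduce chain first answers over each retrieved chunk, then the
    # combine prompt above merges those partial answers via its {summaries} placeholder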
    # create a chain with the prompt template and the store
    # chain = VectorDBQA.from_llm(llm=OpenAI(openai_api_key=api_key, temperature=0), vectorstore=store, combine_prompt=c_prompt)
    # chain = VectorDBQA.from_chain_type(llm=OpenAI(openai_api_key=api_key, temperature=0), chain_type='map_reduce',
    #                                    vectorstore=store)
    qa_chain = load_qa_chain(OpenAI(openai_api_key=api_key, temperature=0), chain_type="map_reduce",
                             combine_prompt=c_prompt)
    chain = VectorDBQA(combine_documents_chain=qa_chain, vectorstore=store)

    # fetch the answer
    result = chain({"query": question})
    print(result)

    # some formatting for the frontend
    result['answer'] = result['result']
    result['answer'] = result['answer'].replace("\\n", "<br>")
    result['answer'] = result['answer'].replace("SOURCES:", "")
    # mock result
    # result = {
    #     "answer": "The answer is 42",
    #     "sources": ["https://en.wikipedia.org/wiki/42_(number)", "https://en.wikipedia.org/wiki/42_(number)"]
    # }
    return result


@app.route("/api/docs_check", methods=["POST"])
def check_docs():
    # check if docs exist in a vectorstore folder
    data = request.get_json()
    vectorstore = "vectors/" + data["docs"]
    base_path = 'https://raw.githubusercontent.com/arc53/DocsHUB/main/'

    if os.path.exists(vectorstore):
        return {"status": 'exists'}
    else:
        r = requests.get(base_path + vectorstore + "docs.index")
        # save to vectors directory
        # check if the directory exists
        if not os.path.exists(vectorstore):
            os.makedirs(vectorstore)

        with open(vectorstore + "docs.index", "wb") as f:
            f.write(r.content)
        # download the store
        r = requests.get(base_path + vectorstore + "faiss_store.pkl")
        with open(vectorstore + "faiss_store.pkl", "wb") as f:
            f.write(r.content)

        return {"status": 'loaded'}


# handling CORS
@app.after_request
def after_request(response):
    response.headers.add('Access-Control-Allow-Origin', '*')
    response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization')
    response.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
    return response


if __name__ == "__main__":
    app.run(debug=True)