# DocsGPT/application/app.py — Flask backend for DocsGPT.
# (Scraped web-view header removed; original file ~173 lines / 5.6 KiB.)
import os
2023-02-15 18:40:23 +00:00
2023-02-03 12:45:29 +00:00
import dotenv
2023-02-15 18:40:23 +00:00
import requests
2023-02-03 12:45:29 +00:00
from flask import Flask, request, render_template
2023-02-14 13:06:28 +00:00
from langchain import FAISS
from langchain import OpenAI, VectorDBQA, HuggingFaceHub, Cohere
from langchain.chains.question_answering import load_qa_chain
2023-02-15 18:40:23 +00:00
from langchain.embeddings import OpenAIEmbeddings, HuggingFaceHubEmbeddings, CohereEmbeddings, HuggingFaceInstructEmbeddings
2023-02-03 12:45:29 +00:00
from langchain.prompts import PromptTemplate
2023-02-14 13:06:28 +00:00
2023-02-15 18:40:23 +00:00
# os.environ["LANGCHAIN_HANDLER"] = "langchain"
if os.getenv("LLM_NAME") is not None:
llm_choice = os.getenv("LLM_NAME")
else:
llm_choice = "openai"
if os.getenv("EMBEDDINGS_NAME") is not None:
embeddings_choice = os.getenv("EMBEDDINGS_NAME")
else:
embeddings_choice = "openai_text-embedding-ada-002"
2023-02-14 13:06:28 +00:00
2023-02-03 12:45:29 +00:00
2023-02-15 18:40:23 +00:00
if llm_choice == "manifest":
from manifest import Manifest
from langchain.llms.manifest import ManifestWrapper
manifest = Manifest(
client_name="huggingface",
client_connection="http://127.0.0.1:5000"
)
# Redirect PosixPath to WindowsPath on Windows
import platform
2023-02-15 18:40:23 +00:00
if platform.system() == "Windows":
import pathlib
2023-02-15 18:40:23 +00:00
temp = pathlib.PosixPath
pathlib.PosixPath = pathlib.WindowsPath
2023-02-03 12:45:29 +00:00
# loading the .env file
dotenv.load_dotenv()
with open("combine_prompt.txt", "r") as f:
template = f.read()
2023-02-15 18:40:23 +00:00
if os.getenv("API_KEY") is not None:
2023-02-08 19:41:35 +00:00
api_key_set = True
else:
api_key_set = False
2023-02-15 18:40:23 +00:00
if os.getenv("EMBEDDINGS_KEY") is not None:
embeddings_key_set = True
else:
embeddings_key_set = False
2023-02-03 12:45:29 +00:00
app = Flask(__name__)
@app.route("/")
def home():
2023-02-15 18:40:23 +00:00
return render_template("index.html", api_key_set=api_key_set, llm_choice=llm_choice,
embeddings_choice=embeddings_choice)
2023-02-03 12:45:29 +00:00
@app.route("/api/answer", methods=["POST"])
def api_answer():
data = request.get_json()
question = data["question"]
2023-02-08 19:41:35 +00:00
if not api_key_set:
api_key = data["api_key"]
else:
2023-02-15 18:40:23 +00:00
api_key = os.getenv("API_KEY")
if not embeddings_key_set:
embeddings_key = data["embeddings_key"]
else:
embeddings_key = os.getenv("EMBEDDINGS_KEY")
print(embeddings_key)
print(api_key)
2023-02-08 19:41:35 +00:00
2023-02-07 01:17:45 +00:00
# check if the vectorstore is set
if "active_docs" in data:
2023-02-07 21:53:29 +00:00
vectorstore = "vectors/" + data["active_docs"]
if data['active_docs'] == "default":
vectorstore = ""
2023-02-07 01:17:45 +00:00
else:
vectorstore = ""
2023-02-07 21:53:29 +00:00
2023-02-07 01:17:45 +00:00
# loading the index and the store and the prompt template
2023-02-15 18:40:23 +00:00
# Note if you have used other embeddings than OpenAI, you need to change the embeddings
if embeddings_choice == "openai_text-embedding-ada-002":
docsearch = FAISS.load_local(vectorstore, OpenAIEmbeddings(openai_api_key=embeddings_key))
elif embeddings_choice == "huggingface_sentence-transformers/all-mpnet-base-v2":
docsearch = FAISS.load_local(vectorstore, HuggingFaceHubEmbeddings())
elif embeddings_choice == "huggingface_hkunlp/instructor-large":
docsearch = FAISS.load_local(vectorstore, HuggingFaceInstructEmbeddings())
elif embeddings_choice == "cohere_medium":
docsearch = FAISS.load_local(vectorstore, CohereEmbeddings(cohere_api_key=embeddings_key))
2023-02-07 01:17:45 +00:00
2023-02-03 12:45:29 +00:00
# create a prompt template
c_prompt = PromptTemplate(input_variables=["summaries", "question"], template=template)
2023-02-15 18:40:23 +00:00
if llm_choice == "openai":
llm = OpenAI(openai_api_key=api_key, temperature=0)
elif llm_choice == "manifest":
llm = ManifestWrapper(client=manifest, llm_kwargs={"temperature": 0.001, "max_tokens": 2048})
elif llm_choice == "huggingface":
llm = HuggingFaceHub(repo_id="bigscience/bloom", huggingfacehub_api_token=api_key)
elif llm_choice == "cohere":
llm = Cohere(model="command-xlarge-nightly", cohere_api_key=api_key)
2023-02-14 13:06:28 +00:00
2023-02-15 18:40:23 +00:00
qa_chain = load_qa_chain(llm=llm, chain_type="map_reduce",
2023-02-14 13:06:28 +00:00
combine_prompt=c_prompt)
chain = VectorDBQA(combine_documents_chain=qa_chain, vectorstore=docsearch, k=2)
2023-02-03 12:45:29 +00:00
# fetch the answer
2023-02-14 13:06:28 +00:00
result = chain({"query": question})
print(result)
2023-02-03 12:45:29 +00:00
# some formatting for the frontend
2023-02-14 13:06:28 +00:00
result['answer'] = result['result']
2023-02-03 12:45:29 +00:00
result['answer'] = result['answer'].replace("\\n", "<br>")
result['answer'] = result['answer'].replace("SOURCES:", "")
2023-02-06 23:37:18 +00:00
# mock result
# result = {
# "answer": "The answer is 42",
# "sources": ["https://en.wikipedia.org/wiki/42_(number)", "https://en.wikipedia.org/wiki/42_(number)"]
# }
2023-02-03 12:45:29 +00:00
return result
2023-02-15 18:40:23 +00:00
2023-02-07 21:53:29 +00:00
@app.route("/api/docs_check", methods=["POST"])
def check_docs():
# check if docs exist in a vectorstore folder
data = request.get_json()
vectorstore = "vectors/" + data["docs"]
base_path = 'https://raw.githubusercontent.com/arc53/DocsHUB/main/'
#
if os.path.exists(vectorstore):
return {"status": 'exists'}
else:
r = requests.get(base_path + vectorstore + "docs.index")
# save to vectors directory
# check if the directory exists
if not os.path.exists(vectorstore):
os.makedirs(vectorstore)
with open(vectorstore + "docs.index", "wb") as f:
f.write(r.content)
# download the store
r = requests.get(base_path + vectorstore + "faiss_store.pkl")
with open(vectorstore + "faiss_store.pkl", "wb") as f:
f.write(r.content)
2023-02-15 18:40:23 +00:00
return {"status": 'loaded'}
2023-02-03 12:45:29 +00:00
# handling CORS
@app.after_request
def after_request(response):
response.headers.add('Access-Control-Allow-Origin', '*')
response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization')
response.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
return response
if __name__ == "__main__":
app.run(debug=True)