Mirror of https://github.com/arc53/DocsGPT (synced 2024-11-17 21:26:26 +00:00)

Merge pull request #117 from unkwnownGriot/no-api-key-error500-fixed
Commit 918e1b3bf6

Fix the server 500 error and show the error message to the client.
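Before this change, any exception inside /api/answer (for example, a missing API key) surfaced as Flask's default HTML 500 page, which the chat frontend could not parse. After it, the endpoint returns a JSON error payload that the UI shows in an alert. A minimal sketch of the new contract from a client's point of view; the host/port and the empty-key scenario are assumptions for illustration, not part of the diff:

# Sketch: exercising the /api/answer error contract introduced by this PR.
# Assumes a local DocsGPT instance; adjust the base URL/port to your setup.
import requests

resp = requests.post(
    "http://localhost:5001/api/answer",
    json={
        "question": "What is DocsGPT?",
        "api_key": "",                 # deliberately empty to trigger a failure
        "embeddings_key": "",
        "history": None,
        "active_docs": "default",
    },
)

data = resp.json()
if "error" in data:
    # error.py builds {"error": <HTTP status name>, "message": <exception text>}
    print(resp.status_code, data.get("message"))
else:
    print(data["answer"])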
.gitignore (vendored): 2 lines changed
@@ -108,7 +108,7 @@ venv/
 ENV/
 env.bak/
 venv.bak/
-
+.flaskenv
 # Spyder project settings
 .spyderproject
 .spyproject
@@ -9,7 +9,7 @@ from langchain import OpenAI, VectorDBQA, HuggingFaceHub, Cohere
 from langchain.chains.question_answering import load_qa_chain
 from langchain.embeddings import OpenAIEmbeddings, HuggingFaceHubEmbeddings, CohereEmbeddings, HuggingFaceInstructEmbeddings
 from langchain.prompts import PromptTemplate
-
+from error import bad_request
 # os.environ["LANGCHAIN_HANDLER"] = "langchain"
 
 if os.getenv("LLM_NAME") is not None:
@@ -74,6 +74,7 @@ def api_answer():
     data = request.get_json()
     question = data["question"]
     history = data["history"]
+    print('-'*5)
     if not api_key_set:
         api_key = data["api_key"]
     else:
@@ -83,62 +84,68 @@ def api_answer():
     else:
         embeddings_key = os.getenv("EMBEDDINGS_KEY")
 
+    # use try and except to check for exception
+    try:
+        # check if the vectorstore is set
+        if "active_docs" in data:
+            vectorstore = "vectors/" + data["active_docs"]
+            if data['active_docs'] == "default":
+                vectorstore = ""
+        else:
+            vectorstore = ""
+
+        # loading the index and the store and the prompt template
+        # Note if you have used other embeddings than OpenAI, you need to change the embeddings
+        if embeddings_choice == "openai_text-embedding-ada-002":
+            docsearch = FAISS.load_local(vectorstore, OpenAIEmbeddings(openai_api_key=embeddings_key))
+        elif embeddings_choice == "huggingface_sentence-transformers/all-mpnet-base-v2":
+            docsearch = FAISS.load_local(vectorstore, HuggingFaceHubEmbeddings())
+        elif embeddings_choice == "huggingface_hkunlp/instructor-large":
+            docsearch = FAISS.load_local(vectorstore, HuggingFaceInstructEmbeddings())
+        elif embeddings_choice == "cohere_medium":
+            docsearch = FAISS.load_local(vectorstore, CohereEmbeddings(cohere_api_key=embeddings_key))
+
+        # create a prompt template
+        if history:
+            history = json.loads(history)
+            template_temp = template_hist.replace("{historyquestion}", history[0]).replace("{historyanswer}", history[1])
+            c_prompt = PromptTemplate(input_variables=["summaries", "question"], template=template_temp, template_format="jinja2")
+        else:
+            c_prompt = PromptTemplate(input_variables=["summaries", "question"], template=template, template_format="jinja2")
+
+        if llm_choice == "openai":
+            llm = OpenAI(openai_api_key=api_key, temperature=0)
+        elif llm_choice == "manifest":
+            llm = ManifestWrapper(client=manifest, llm_kwargs={"temperature": 0.001, "max_tokens": 2048})
+        elif llm_choice == "huggingface":
+            llm = HuggingFaceHub(repo_id="bigscience/bloom", huggingfacehub_api_token=api_key)
+        elif llm_choice == "cohere":
+            llm = Cohere(model="command-xlarge-nightly", cohere_api_key=api_key)
+
+        qa_chain = load_qa_chain(llm=llm, chain_type="map_reduce",
+                                 combine_prompt=c_prompt)
+
+        chain = VectorDBQA(combine_documents_chain=qa_chain, vectorstore=docsearch, k=4)
+
+        # fetch the answer
+        result = chain({"query": question})
+        print(result)
+
+        # some formatting for the frontend
+        result['answer'] = result['result']
+        result['answer'] = result['answer'].replace("\\n", "<br>")
+        result['answer'] = result['answer'].replace("SOURCES:", "")
+        # mock result
+        # result = {
+        #     "answer": "The answer is 42",
+        #     "sources": ["https://en.wikipedia.org/wiki/42_(number)", "https://en.wikipedia.org/wiki/42_(number)"]
+        # }
+        return result
+    except Exception as e:
+        print(str(e))
+        return bad_request(500,str(e))
 
 
 @app.route("/api/docs_check", methods=["POST"])
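Stripped of the LangChain specifics, the backend pattern introduced above is: wrap the whole answer pipeline in try/except and convert any exception into the JSON response built by error.py. A minimal sketch, with a hypothetical run_pipeline() standing in for the vectorstore/LLM code and assuming error.py is importable from the same directory:

# Minimal sketch of the error-handling pattern from api_answer().
# run_pipeline() is a hypothetical stand-in for the FAISS/LangChain work above.
from flask import Flask, request

from error import bad_request  # helper added in application/error.py

app = Flask(__name__)


def run_pipeline(question):
    raise RuntimeError("no API key set")  # simulate the failure this PR targets


@app.route("/api/answer", methods=["POST"])
def api_answer():
    data = request.get_json()
    try:
        # everything that can blow up (loading vectors, calling the LLM) lives here
        return {"answer": run_pipeline(data["question"])}
    except Exception as e:
        print(str(e))
        # JSON the chat UI can display, instead of Flask's HTML 500 page
        return bad_request(500, str(e))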
application/error.py (new file): 13 lines added
@@ -0,0 +1,13 @@
+from flask import jsonify
+from werkzeug.http import HTTP_STATUS_CODES
+
+def response_error(code_status,message=None):
+    payload = {'error':HTTP_STATUS_CODES.get(code_status,"something went wrong")}
+    if message:
+        payload['message'] = message
+    response = jsonify(payload)
+    response.status_code = code_status
+    return response
+
+def bad_request(status_code=400,message=''):
+    return response_error(code_status=status_code,message=message)
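To see what these helpers actually return, bad_request() can be called inside a throwaway application context, since jsonify() needs one. The expected output is shown in the comments; the import assumes the application directory is on the Python path:

# Sketch: inspecting the JSON error response produced by application/error.py.
from flask import Flask

from error import bad_request

app = Flask(__name__)

with app.app_context():  # jsonify() requires an active application context
    resp = bad_request(500, "OPENAI_API_KEY is not set")
    print(resp.status_code)   # 500
    print(resp.get_json())    # {'error': 'Internal Server Error', 'message': 'OPENAI_API_KEY is not set'}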
@@ -1,55 +1,73 @@
-var el = document.getElementById('message-form');
-if (el) {
-    el.addEventListener("submit", function (event) {
-        console.log("submitting")
-        event.preventDefault()
-        var message = document.getElementById("message-input").value;
-        msg_html = '<div class="bg-blue-500 text-white p-2 rounded-lg mb-2 self-end"><p class="text-sm">'
-        msg_html += message
-        msg_html += '</p></div>'
-        document.getElementById("messages").innerHTML += msg_html;
-        let chatWindow = document.getElementById("messages-container");
-        chatWindow.scrollTop = chatWindow.scrollHeight;
-        document.getElementById("message-input").value = "";
-        document.getElementById("button-submit").innerHTML = '<i class="fa fa-circle-o-notch fa-spin"></i> Thinking...';
-        document.getElementById("button-submit").disabled = true;
-        if (localStorage.getItem('activeDocs') == null) {
-            localStorage.setItem('activeDocs', 'default')
-        }
-
-
-        fetch('/api/answer', {
-            method: 'POST',
-            headers: {
-                'Content-Type': 'application/json',
-            },
-
-            body: JSON.stringify({question: message,
-                api_key: localStorage.getItem('apiKey'),
-                embeddings_key: localStorage.getItem('apiKey'),
-                history: localStorage.getItem('chatHistory'),
-                active_docs: localStorage.getItem('activeDocs')}),
-        })
-        .then(response => response.json())
-        .then(data => {
-            console.log('Success:', data);
-            msg_html = '<div class="bg-indigo-500 text-white p-2 rounded-lg mb-2 self-start"><code class="text-sm">'
-            msg_html += data.answer
-            msg_html += '</code></div>'
-            document.getElementById("messages").innerHTML += msg_html;
-            let chatWindow = document.getElementById("messages-container");
-            chatWindow.scrollTop = chatWindow.scrollHeight;
-            document.getElementById("button-submit").innerHTML = 'Send';
-            document.getElementById("button-submit").disabled = false;
-            let chatHistory = [message, data.answer];
-            localStorage.setItem('chatHistory', JSON.stringify(chatHistory));
-        })
-        .catch((error) => {
-            console.error('Error:', error);
-            console.log(error);
-            document.getElementById("button-submit").innerHTML = 'Send';
-            document.getElementById("button-submit").disabled = false;
-        });
-
-    });
-}
+var form = document.getElementById('message-form');
+var errorModal = document.getElementById('error-alert')
+document.getElementById('close').addEventListener('click',()=>{
+    errorModal.classList.toggle('hidden')
+})
+
+
+function submitForm(event){
+    event.preventDefault()
+    var message = document.getElementById("message-input").value;
+    console.log(message.length)
+    if(message.length === 0){
+        return
+    }
+    msg_html = '<div class="bg-blue-500 text-white p-2 rounded-lg mb-2 self-end"><p class="text-sm">'
+    msg_html += message
+    msg_html += '</p></div>'
+    document.getElementById("messages").innerHTML += msg_html;
+    let chatWindow = document.getElementById("messages-container");
+    chatWindow.scrollTop = chatWindow.scrollHeight;
+    document.getElementById("message-input").value = "";
+    document.getElementById("button-submit").innerHTML = '<i class="fa fa-circle-o-notch fa-spin"></i> Thinking...';
+    document.getElementById("button-submit").disabled = true;
+    if (localStorage.getItem('activeDocs') == null) {
+        localStorage.setItem('activeDocs', 'default')
+    }
+
+
+    fetch('/api/answer', {
+        method: 'POST',
+        headers: {
+            'Content-Type': 'application/json',
+        },
+
+        body: JSON.stringify({question: message,
+            api_key: localStorage.getItem('apiKey'),
+            embeddings_key: localStorage.getItem('apiKey'),
+            history: localStorage.getItem('chatHistory'),
+            active_docs: localStorage.getItem('activeDocs')}),
+    }).then((response)=> response.json())
+    .then(data => {
+        console.log('Success:', data);
+        if(data.error){
+            document.getElementById('text-error').textContent = `Error : ${JSON.stringify(data.message)}`
+            errorModal.classList.toggle('hidden')
+        }
+        if(data.answer){
+            msg_html = '<div class="bg-indigo-500 text-white p-2 rounded-lg mb-2 self-start"><code class="text-sm">'
+            msg_html += data.answer
+            msg_html += '</code></div>'
+            document.getElementById("messages").innerHTML += msg_html;
+            let chatWindow = document.getElementById("messages-container");
+            chatWindow.scrollTop = chatWindow.scrollHeight;
+        }
+        document.getElementById("button-submit").innerHTML = 'Send';
+        document.getElementById("button-submit").disabled = false;
+        let chatHistory = [message, data.answer || ''];
+        localStorage.setItem('chatHistory', JSON.stringify(chatHistory));
+
+
+    })
+    .catch((error) => {
+        console.error('Error:', error);
+        // console.log(error);
+        // document.getElementById("button-submit").innerHTML = 'Send';
+        // document.getElementById("button-submit").disabled = false;
+
+
+    });
+}
+
+window.addEventListener('submit',submitForm)
@@ -28,6 +28,17 @@
     {% endif %}
   </div>
 </header>
+
+
+  <!-- Alert Info -->
+  <div class="border flex justify-between
+       w-auto px-4 py-3 rounded relative
+       hidden" style="background-color: rgb(197, 51, 51);color: white;" id="error-alert" role="alert">
+    <span class="block sm:inline" id="text-error"></span>
+    <strong class="text-xl align-center alert-del" style="cursor: pointer;" id="close">×</strong>
+  </div>
+
+
 <div class="lg:flex ml-2 mr-2">
   <div class="lg:w-3/4 min-h-screen max-h-screen">
     <div class="w-full flex flex-col h-5/6">
@@ -60,6 +71,8 @@ This will return a new DataFrame with all the columns from both tables, and only
       </div>
 
 
+
+
     </div>
   </div>
   <div class="lg:w-1/4 p-2 sm:max-lg:hidden">
@@ -78,10 +91,15 @@ This will return a new DataFrame with all the columns from both tables, and only
 
      <div class="flex items-center justify-center h-full">
+
+
      </div>
 
 
+
+
+
    {% if not api_key_set %}
-    <div class="fixed z-10 overflow-y-auto top-0 w-full left-0 hidden" id="modal">
+    <div class="fixed z-10 overflow-y-auto top-0 w-full left-0 show" id="modal">
      <div class="flex items-center justify-center min-height-100vh pt-4 px-4 pb-20 text-center sm:block sm:p-0">
        <div class="fixed inset-0 transition-opacity">
          <div class="absolute inset-0 bg-gray-900 opacity-75" />
@@ -105,6 +123,9 @@ This will return a new DataFrame with all the columns from both tables, and only
      </div>
    </div>
    {% endif %}
+
+
+
 <script>
   function docsIndex() {
     // loads latest index from https://raw.githubusercontent.com/arc53/DocsHUB/main/combined.json