use ChatGPT API in flask app

pull/315/head
isafulf 1 year ago
parent fb7a87a8a5
commit 1af8aab27d

@@ -42,7 +42,10 @@ def get_answer_from_files(question, session_id, pinecone_index):
                 break
             files_string += file_string
 
-        prompt = f"Given a question, try to answer it using the content of the file extracts below, and if you cannot answer, or find " \
+        messages = [
+            {
+                "role": "system",
+                "content": f"Given a question, try to answer it using the content of the file extracts below, and if you cannot answer, or find " \
             f"a relevant file, just output \"I couldn't find the answer to that question in your files.\".\n\n" \
             f"If the answer is not contained in the files or if there are no file extracts, respond with \"I couldn't find the answer " \
             f"to that question in your files.\" If the question is not actually a question, respond with \"That's not a valid question.\"\n\n" \
@@ -54,20 +57,19 @@ def get_answer_from_files(question, session_id, pinecone_index):
             f"Question: {question}\n\n" \
             f"Files:\n{files_string}\n" \
             f"Answer:"
+            },
+        ]
 
-        logging.info(f"[get_answer_from_files] prompt: {prompt}")
-
-        response = openai.Completion.create(
-            prompt=prompt,
-            temperature=0,
+        response = openai.ChatCompletion.create(
+            messages=messages,
+            model=GENERATIVE_MODEL,
             max_tokens=1000,
-            top_p=1,
-            frequency_penalty=0,
-            presence_penalty=0,
-            engine=GENERATIVE_MODEL,
+            temperature=0,
         )
 
-        answer = response.choices[0].text.strip()
+        choices = response["choices"]  # type: ignore
+        answer = choices[0].message.content.strip()
 
         logging.info(f"[get_answer_from_files] answer: {answer}")
 
         return jsonify({"answer": answer})
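For context, here is a minimal self-contained sketch of the chat call pattern this hunk migrates to, assuming the pre-1.0 openai-python SDK (>= 0.27, matching the requirements bump below). The API key, model name, and message contents are placeholders, not values from the app; note the commit itself folds the whole prompt into a single system message, while this sketch splits the question into a user message for readability:

import openai

openai.api_key = "sk-..."  # placeholder; the real app reads its key from environment/config

# The chat API takes a list of role-tagged messages instead of a single prompt string.
messages = [
    {"role": "system", "content": "Answer the question using only the file extracts provided."},
    {"role": "user", "content": "What does the contract say about termination?"},  # example question
]

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",  # GENERATIVE_MODEL in the app's config
    messages=messages,
    max_tokens=1000,
    temperature=0,  # deterministic output, as in the diff
)

# Chat responses carry the text under message.content,
# not under choices[0].text as the old Completion API did.
answer = response["choices"][0]["message"]["content"].strip()
print(answer)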

@@ -8,7 +8,7 @@ SERVER_PORT: "8080"
 # ---- OPENAI CONFIG -----
 EMBEDDINGS_MODEL: "text-embedding-ada-002"
-GENERATIVE_MODEL: "text-davinci-003"
+GENERATIVE_MODEL: "gpt-3.5-turbo" # use gpt-4 for better results
 EMBEDDING_DIMENSIONS: 1536
 TEXT_EMBEDDING_CHUNK_SIZE: 200
 # This is the minimum cosine similarity score that a file must have with the search query to be considered relevant
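This diff does not show how the app consumes config.yaml, but with PyYAML (pinned in the requirements below) a loader would look roughly like this hypothetical sketch; the variable names simply mirror the keys above:

import yaml

# Hypothetical loader sketch; the app's actual config-loading code is not part of this commit.
with open("config.yaml") as f:
    config = yaml.safe_load(f)

GENERATIVE_MODEL = config["GENERATIVE_MODEL"]          # "gpt-3.5-turbo" after this change
EMBEDDING_DIMENSIONS = config["EMBEDDING_DIMENSIONS"]  # 1536, matching text-embedding-ada-002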

@@ -1,11 +1,11 @@
-Flask-Cors==3.0.10
-openai==0.13.0
-pinecone-client==2.0.13
-PyPDF2==2.10.4
-numpy==1.23.2
-scikit-learn==1.1.2
-docx2txt==0.8
+Flask-Cors>=3.0.10
+openai>=0.27.2
+pinecone-client>=2.0.13
+PyPDF2>=2.10.4
+numpy>=1.23.2
+scikit-learn>=1.1.2
+docx2txt>=0.8
 flask>=1.1.4
-jinja2==3.0.1
-PyYAML==6.0
-tiktoken==0.1.2
+jinja2>=3.0.1
+PyYAML>=6.0
+tiktoken>=0.1.2
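The jump from openai==0.13.0 to openai>=0.27.2 is what makes openai.ChatCompletion available at all; on an older install the new handler would fail with an AttributeError. A small startup sanity check, sketched here as an optional addition rather than part of the commit:

import openai

# openai.ChatCompletion only exists in openai-python >= 0.27, hence the new pin.
# Fail fast with an actionable message instead of an AttributeError mid-request.
if not hasattr(openai, "ChatCompletion"):
    raise RuntimeError("openai-python is too old for the chat API; run: pip install 'openai>=0.27.2'")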