use ChatGPT API in flask app

Branch: pull/315/head
Author: isafulf, 1 year ago
Parent: fb7a87a8a5
Commit: 1af8aab27d

@@ -42,7 +42,10 @@ def get_answer_from_files(question, session_id, pinecone_index):
                 break
             files_string += file_string
 
-        prompt = f"Given a question, try to answer it using the content of the file extracts below, and if you cannot answer, or find " \
+        messages = [
+            {
+                "role": "system",
+                "content": f"Given a question, try to answer it using the content of the file extracts below, and if you cannot answer, or find " \
             f"a relevant file, just output \"I couldn't find the answer to that question in your files.\".\n\n" \
             f"If the answer is not contained in the files or if there are no file extracts, respond with \"I couldn't find the answer " \
             f"to that question in your files.\" If the question is not actually a question, respond with \"That's not a valid question.\"\n\n" \
@@ -54,20 +57,19 @@ def get_answer_from_files(question, session_id, pinecone_index):
             f"Question: {question}\n\n" \
             f"Files:\n{files_string}\n" \
             f"Answer:"
+            },
+        ]
 
-        logging.info(f"[get_answer_from_files] prompt: {prompt}")
-        response = openai.Completion.create(
-            prompt=prompt,
-            temperature=0,
+        response = openai.ChatCompletion.create(
+            messages=messages,
+            model=GENERATIVE_MODEL,
             max_tokens=1000,
-            top_p=1,
-            frequency_penalty=0,
-            presence_penalty=0,
-            engine=GENERATIVE_MODEL,
+            temperature=0,
         )
 
-        answer = response.choices[0].text.strip()
+        choices = response["choices"] # type: ignore
+        answer = choices[0].message.content.strip()
 
         logging.info(f"[get_answer_from_files] answer: {answer}")
 
         return jsonify({"answer": answer})
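
For reference, the substance of this change is the switch from the legacy completions endpoint (openai.Completion with a single prompt string) to the chat completions endpoint (openai.ChatCompletion with a list of role/content messages), available in openai-python 0.27 and later. The standalone sketch below is not part of the commit; it reproduces the same call shape with hypothetical inputs and assumes OPENAI_API_KEY is set in the environment.

    # Minimal sketch of the pattern used above; assumes openai>=0.27,<1.0
    # and OPENAI_API_KEY exported in the environment. Inputs are hypothetical.
    import openai

    question = "What is in my files?"
    files_string = '###\n"notes.txt"\nExample extract text...\n'

    messages = [
        {
            "role": "system",
            "content": f"Given a question, try to answer it using the file extracts below.\n\n"
                       f"Question: {question}\n\nFiles:\n{files_string}\nAnswer:",
        },
    ]

    response = openai.ChatCompletion.create(
        messages=messages,
        model="gpt-3.5-turbo",
        max_tokens=1000,
        temperature=0,
    )

    # The response object supports both attribute and dict-style access.
    answer = response["choices"][0]["message"]["content"].strip()
    print(answer)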

@@ -8,7 +8,7 @@ SERVER_PORT: "8080"
 
 # ---- OPENAI CONFIG -----
 EMBEDDINGS_MODEL: "text-embedding-ada-002"
-GENERATIVE_MODEL: "text-davinci-003"
+GENERATIVE_MODEL: "gpt-3.5-turbo" # use gpt-4 for better results
 EMBEDDING_DIMENSIONS: 1536
 TEXT_EMBEDDING_CHUNK_SIZE: 200
 # This is the minimum cosine similarity score that a file must have with the search query to be considered relevant
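
The GENERATIVE_MODEL value set here is what the Flask handler above passes as model= to ChatCompletion.create. As a rough illustration (not taken from the commit), the YAML can be read with PyYAML, which requirements.txt already includes; the config file name and loader below are assumptions, since the app's real loading code is not shown in this diff.

    # Hypothetical config-loading sketch; the repo's actual loader may differ.
    import yaml

    with open("config.yaml") as f:  # assumed file name
        config = yaml.safe_load(f)

    GENERATIVE_MODEL = config["GENERATIVE_MODEL"]  # "gpt-3.5-turbo" (or "gpt-4")
    EMBEDDINGS_MODEL = config["EMBEDDINGS_MODEL"]  # "text-embedding-ada-002"
    print(GENERATIVE_MODEL, EMBEDDINGS_MODEL)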

@@ -1,11 +1,11 @@
-Flask-Cors==3.0.10
-openai==0.13.0
-pinecone-client==2.0.13
-PyPDF2==2.10.4
-numpy==1.23.2
-scikit-learn==1.1.2
-docx2txt==0.8
+Flask-Cors>=3.0.10
+openai>=0.27.2
+pinecone-client>=2.0.13
+PyPDF2>=2.10.4
+numpy>=1.23.2
+scikit-learn>=1.1.2
+docx2txt>=0.8
 flask>=1.1.4
-jinja2==3.0.1
-PyYAML==6.0
-tiktoken==0.1.2
+jinja2>=3.0.1
+PyYAML>=6.0
+tiktoken>=0.1.2
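
The openai pin is raised to >=0.27.2 because the ChatCompletion endpoint used in the handler was only added to openai-python in 0.27.0; the previously pinned 0.13.0 exposes only the legacy Completion API. A quick optional sanity check after installing the updated requirements (not part of the commit):

    # Confirm the installed openai package is new enough for the chat API.
    from importlib.metadata import version

    import openai

    print("openai version:", version("openai"))
    assert hasattr(openai, "ChatCompletion"), "run: pip install 'openai>=0.27.2'"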