Fewer tokens, fewer issues

pull/256/head 0.3.0
Alex 1 year ago
parent 899777632b
commit 577d58c92b

@@ -170,7 +170,7 @@ def complete_stream(question, docsearch, chat_history, api_key):
     messages_combine.append({"role": "system", "content": i["response"]})
     messages_combine.append({"role": "user", "content": question})
     completion = openai.ChatCompletion.create(model="gpt-3.5-turbo",
-        messages=messages_combine, stream=True, max_tokens=1000, temperature=0)
+        messages=messages_combine, stream=True, max_tokens=500, temperature=0)
     for line in completion:
         if 'content' in line['choices'][0]['delta']:
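For context, a minimal sketch of how this streaming call is typically consumed with the reduced token budget, assuming the legacy openai<1.0 Python SDK used in this file; the stream_answer helper and its arguments are illustrative, not part of the commit:

import openai

def stream_answer(messages_combine, api_key):
    # Illustrative sketch: stream a chat completion with the lowered
    # token budget from this commit (max_tokens=500 instead of 1000).
    openai.api_key = api_key
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages_combine,
        stream=True,
        max_tokens=500,
        temperature=0,
    )
    for line in completion:
        delta = line["choices"][0]["delta"]
        if "content" in delta:
            yield delta["content"]  # emit each streamed content chunk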
