fix: non-stream mode cannot use gpt-3.5

pull/13/head
sean1832 1 year ago
parent 229aa6eca6
commit d1bc2aa6c8

@@ -48,6 +48,22 @@ def gpt3(prompt, model, params):
     return text
 
 
+def gpt35(prompt, params, system_role_content: str = 'You are a helpful assistant.'):
+    completions = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        max_tokens=params.max_tokens,
+        temperature=params.temp,
+        top_p=params.top_p,
+        frequency_penalty=params.frequency_penalty,
+        presence_penalty=params.present_penalty,
+        messages=[
+            {"role": "system", "content": system_role_content},
+            {"role": "user", "content": prompt}
+        ])
+    text = completions['choices'][0]['message']['content']
+    return text
+
+
 def gpt3_stream(prompt, model, params):
     response = openai.Completion.create(
         model=model,
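For context, a minimal usage sketch of the new `gpt35()` helper (not part of the commit). It assumes the legacy `openai` 0.x SDK that this diff targets, and a hypothetical `Params` stand-in mirroring the attributes the function reads (`max_tokens`, `temp`, `top_p`, `frequency_penalty`, `present_penalty`):

```python
# Usage sketch (assumed, not from the commit): exercising gpt35() with a
# stand-in Params object shaped like the attributes the function accesses.
from dataclasses import dataclass

import openai

from GPT import gpt_tools  # module path as referenced elsewhere in the repo


@dataclass
class Params:                      # hypothetical stand-in for the repo's params
    max_tokens: int = 256
    temp: float = 0.7              # forwarded as `temperature`
    top_p: float = 1.0
    frequency_penalty: float = 0.0
    present_penalty: float = 0.0   # note the spelling: forwarded as `presence_penalty`


openai.api_key = 'sk-...'          # placeholder; supply a real key

answer = gpt_tools.gpt35('Explain streaming vs non-streaming completions.', Params())
print(answer)
```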

@@ -47,7 +47,10 @@ def run(query, model, prompt_file, isQuestion, params, info_file=None):
             prompt = prompt.replace('<<QS>>', query)
             prompt = prompt.replace('<<MY-INFO>>', my_info)
-            answer = GPT.gpt_tools.gpt3(prompt, model, params)
+            if model == 'gpt-3.5-turbo':
+                answer = GPT.gpt_tools.gpt35(prompt, params)
+            else:
+                answer = GPT.gpt_tools.gpt3(prompt, model, params)
             answers.append(answer)
         all_response = '\n\n'.join(answers)
     else:
@@ -55,7 +58,10 @@ def run(query, model, prompt_file, isQuestion, params, info_file=None):
         responses = []
         for chunk in chunks:
             prompt = util.read_file(prompt_file).replace('<<DATA>>', chunk)
-            response = GPT.gpt_tools.gpt3(prompt, model, params)
+            if model == 'gpt-3.5-turbo':
+                response = GPT.gpt_tools.gpt35(prompt, params)
+            else:
+                response = GPT.gpt_tools.gpt3(prompt, model, params)
             responses.append(response)
         all_response = '\n\n'.join(responses)
     return all_response
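One observation on the fix: the same model check is now duplicated in both branches of `run()`. A possible follow-up (a sketch with a hypothetical helper named `complete`, not code from this commit) would hoist the dispatch into one place so future model additions touch a single function:

```python
# Possible refactor sketch (assumed, not part of the commit): centralize the
# gpt-3.5 vs. completion-endpoint dispatch that run() now repeats twice.
import GPT.gpt_tools  # already imported in the module this diff touches


def complete(prompt, model, params):
    """Route chat models to gpt35(); everything else to the gpt3() endpoint."""
    if model == 'gpt-3.5-turbo':
        return GPT.gpt_tools.gpt35(prompt, params)
    return GPT.gpt_tools.gpt3(prompt, model, params)
```

With that helper, both loop bodies reduce to a single call, e.g. `answer = complete(prompt, model, params)`.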
