feat: Add support for GPT-3.5-Turbo (ChatGPT) model

This commit adds support for the GPT-3.5-Turbo model in ChatGPT. This model is an improved version of the existing GPT-3 model and offers better performance and accuracy in language generation tasks.
pull/13/head
sean1832 1 year ago
parent 559dd4700e
commit 680e542607

@@ -79,11 +79,8 @@ def get_stream_prompt(query, prompt_file, isQuestion, info_file=None):
def run_stream(query, model, prompt_file, isQuestion, params, info_file=None):
    """Build a prompt and start a streaming completion for *model*.

    Consolidates the old ``run_35_Stream`` helper into this single entry
    point: 'gpt-3.5-turbo' is routed to the chat-style streaming helper,
    every other model name to the classic GPT-3 completion stream.

    Args:
        query: User query text used to build the prompt.
        model: Model name, e.g. 'gpt-3.5-turbo' or a GPT-3 engine name.
        prompt_file: Path to the prompt template file.
        isQuestion: Flag forwarded to the prompt builder (template choice).
        params: Generation parameters forwarded to the stream call.
        info_file: Optional extra info file used when building the prompt.

    Returns:
        The streaming client/iterator returned by the underlying GPT tool.
    """
    prompt = get_stream_prompt(query, prompt_file, isQuestion, info_file)
    if model == 'gpt-3.5-turbo':
        # Chat endpoint: the helper hard-codes the model, so only the
        # prompt and params are forwarded.
        client = GPT.gpt_tools.gpt35_stream(prompt, params)
    else:
        client = GPT.gpt_tools.gpt3_stream(prompt, model, params)
    return client

@@ -257,7 +257,15 @@ def process_response_stream(query, target_model, prompt_file: str, params: GPT.m
if choice['finish_reason'] == 'length':
st.warning("⚠️ " + _('Result cut off. max_tokens') + f' ({params.max_tokens}) ' + _('too small. Consider increasing max_tokens.'))
break
char = choice['text']
if 'gpt-3.5-turbo' in target_model:
delta = choice['delta']
if "role" in delta or delta == {}:
char = ''
else:
char = delta['content']
else:
char = choice['text']
response = previous_chars + char
response_panel.info(f'{response}')
previous_chars += char
@@ -312,8 +320,14 @@ def execute_brain(q, params: GPT.model.param,
if choice['finish_reason'] == 'length':
st.warning("⚠️ " + _('Result cut off. max_tokens') + f' ({params.max_tokens}) ' + _('too small. Consider increasing max_tokens.'))
break
char = choice['text']
if 'gpt-3.5-turbo' in model.question_model:
delta = choice['delta']
if "role" in delta or delta == {}:
char = ''
else:
char = delta['content']
else:
char = choice['text']
answer = previous_chars + char
if is_question_selected:
answer_panel.info(f'{answer}')

Loading…
Cancel
Save