From 021366ca67bddcb6c99314dd8af06b2ca9c8792e Mon Sep 17 00:00:00 2001
From: sean1832
Date: Sat, 4 Mar 2023 01:53:55 +1100
Subject: [PATCH] fix: change max token for gpt-3.5 and davinci to 4096

---
 Seanium_Brain.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/Seanium_Brain.py b/Seanium_Brain.py
index db2109f..e4d7605 100644
--- a/Seanium_Brain.py
+++ b/Seanium_Brain.py
@@ -151,7 +151,9 @@ with body:
             st_tool.rebuild_brain(chunk_size)
         if not query == '':
             if models.question_model == 'text-davinci-003' or 'text-davinci-003' in models.other_models:
-                max_model_token = 4000
+                max_model_token = 4096
+            elif models.question_model == 'gpt-3.5-turbo' or 'gpt-3.5-turbo' in models.other_models:
+                max_model_token = 4096
             else:
                 max_model_token = 2048
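
Note (not part of the patch): a minimal Python sketch of the token-limit selection the patch produces, assuming the same model names and the repository's models.question_model / models.other_models fields; the lookup table and the resolve_max_tokens helper are illustrative only and do not exist in the codebase.

# Illustrative sketch of the selection logic above as a lookup table.
# Model names and limits mirror the diff; resolve_max_tokens is hypothetical.
MODEL_TOKEN_LIMITS = {
    'text-davinci-003': 4096,
    'gpt-3.5-turbo': 4096,
}
DEFAULT_TOKEN_LIMIT = 2048  # fallback used by the patch for any other model


def resolve_max_tokens(question_model, other_models):
    """Return the max token budget for the selected model(s)."""
    for name, limit in MODEL_TOKEN_LIMITS.items():
        # Mirrors the patched condition: the model is either the question
        # model itself or listed among the other selected models.
        if question_model == name or name in other_models:
            return limit
    return DEFAULT_TOKEN_LIMIT


# Example: resolve_max_tokens('gpt-3.5-turbo', []) returns 4096,
# resolve_max_tokens('text-curie-001', []) returns 2048.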