ui: move chunk size out of advanced options

pull/10/head
sean1832 1 year ago
parent 044b4c2e38
commit 4590d68465

@@ -64,6 +64,11 @@ with st.sidebar:
                                     "your prompt plus `max_tokens` cannot exceed the model's context length. Most "
                                     "models have a context length of 2048 tokens (except for the newest models, "
                                     "which support 4096)."))
+    chunk_size = st.slider(_('Chunk size'), 1500, 4500,
+                           value=util.read_json_at(INFO.BRAIN_MEMO, 'chunk_size', 4000),
+                           help=_("The number of tokens to consider at each step. The larger this is, the more "
+                                  "context the model has to work with, but the slower and more expensive "
+                                  "generation will be."))
     with st.expander(label=_('Advanced Options')):
         top_p = st.slider(_('Top_P'), 0.0, 1.0, value=util.read_json_at(INFO.BRAIN_MEMO, 'top_p', 1.0),
@@ -83,12 +88,6 @@ with st.sidebar:
                                            "new tokens based on their existing frequency in the text so far."
                                            "\n\n[See more information about frequency and presence penalties.]"
                                            "(https://platform.openai.com/docs/api-reference/parameter-details)"))
-        chunk_size = st.slider(_('Chunk size'), 1500, 4500,
-                               value=util.read_json_at(INFO.BRAIN_MEMO, 'chunk_size', 4000),
-                               help=_("The number of tokens to consider at each step. The larger this is, the more "
-                                      "context the model has to work with, but the slower and more expensive "
-                                      "generation will be."))
         enable_stream = st_toggle.st_toggle_switch(_('Stream (experimental)'),
                                                    default_value=util.read_json_at(INFO.BRAIN_MEMO, 'enable_stream',
                                                                                    False))
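
For reference, a minimal sketch of the sidebar layout this commit produces: the chunk-size slider becomes a top-level sidebar control, while the other tuning parameters stay behind the Advanced Options expander. This is an illustrative reduction using only the Streamlit API; the repo's `_()` i18n wrapper and `util.read_json_at` persistence are replaced with plain literals so it runs standalone.

```python
import streamlit as st

with st.sidebar:
    # After this commit, chunk size is visible without opening the expander.
    chunk_size = st.slider('Chunk size', 1500, 4500, value=4000,
                           help='Tokens the model considers at each step.')
    # The remaining tuning knobs stay behind Advanced Options.
    with st.expander('Advanced Options'):
        top_p = st.slider('Top_P', 0.0, 1.0, value=1.0)
```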
