More conservative default params and trim leading whitespace from response.

This commit is contained in:
Adam Treat 2023-04-16 13:56:18 -04:00
parent 7215b9f3fb
commit f8b962d50a
2 changed files with 25 additions and 12 deletions

10
llm.cpp
View File

@@ -80,9 +80,17 @@ void LLMObject::resetContext()
s_ctx = LLModel::PromptContext();
}
// Return a copy of `input` with leading whitespace stripped.
// Trimming stops at the first non-whitespace character OR the first '\n':
// a leading newline (and everything after it) is preserved intact.
std::string remove_leading_whitespace(const std::string& input) {
    std::size_t start = 0;
    while (start < input.size()) {
        const unsigned char ch = static_cast<unsigned char>(input[start]);
        // Keep scanning only while we see whitespace that is not a newline.
        if (!std::isspace(ch) || ch == '\n')
            break;
        ++start;
    }
    return input.substr(start);
}
QString LLMObject::response() const
{
    // Strip leading whitespace so the UI never renders a blank gap before
    // the model's first token. (The raw-return line was stale diff residue:
    // an unreachable duplicate `return` left from the pre-commit version.)
    return QString::fromStdString(remove_leading_whitespace(m_response));
}
QString LLMObject::modelName() const

View File

@@ -54,11 +54,11 @@ Window {
title: qsTr("Settings")
height: 600
width: 600
property real temperature: 0.9
property real topP: 0.9
property int topK: 40
property int maxLength: 4096
property int promptBatchSize: 9
property real defaultTemperature: 0.7
property real defaultTopP: 0.95
property int defaultTopK: 40
property int defaultMaxLength: 4096
property int defaultPromptBatchSize: 9
property string defaultPromptTemplate: "The prompt below is a question to answer, a task to complete, or a conversation to respond to; decide which and write an appropriate response.
### Prompt:
@@ -66,18 +66,23 @@ Window {
### Response:\n"
property string promptTemplate: ""
property real temperature: 0.0
property real topP: 0.0
property int topK: 0
property int maxLength: 0
property int promptBatchSize: 0
function restoreDefaults() {
temperature = 0.9;
topP = 0.9;
topK = 40;
maxLength = 4096;
promptBatchSize = 9;
temperature = defaultTemperature;
topP = defaultTopP;
topK = defaultTopK;
maxLength = defaultMaxLength;
promptBatchSize = defaultPromptBatchSize;
promptTemplate = defaultPromptTemplate;
}
Component.onCompleted: {
promptTemplate = defaultPromptTemplate;
restoreDefaults();
}
GridLayout {