mirror of
https://github.com/nomic-ai/gpt4all
synced 2024-11-06 09:20:33 +00:00
More conservative default params and trim leading whitespace from response.
This commit is contained in:
parent
f63a2df715
commit
b39acea516
10
llm.cpp
10
llm.cpp
@@ -80,9 +80,17 @@ void LLMObject::resetContext()
|
||||
s_ctx = LLModel::PromptContext();
|
||||
}
|
||||
|
||||
// Return a copy of `input` with its leading whitespace removed.
// Note: a leading '\n' deliberately stops the trimming — newlines at the
// front of the response are kept, only spaces/tabs before them are dropped.
std::string remove_leading_whitespace(const std::string& input) {
    const auto start = std::find_if(input.cbegin(), input.cend(),
        [](unsigned char ch) {
            // Stop at the first character that is either a newline or
            // not whitespace at all.
            return ch == '\n' || !std::isspace(ch);
        });
    return std::string(start, input.cend());
}
|
||||
|
||||
// Accessor for the accumulated model response as a QString.
// NOTE(review): this span is a diff rendering — both the pre-change and
// post-change return statements appear below without +/- markers. As written,
// the first return executes and the second is unreachable; the commit's
// intent is that only the remove_leading_whitespace() form survives.
QString LLMObject::response() const
|
||||
{
|
||||
return QString::fromStdString(m_response);
|
||||
return QString::fromStdString(remove_leading_whitespace(m_response));
|
||||
}
|
||||
|
||||
QString LLMObject::modelName() const
|
||||
|
27
main.qml
27
main.qml
@@ -54,11 +54,11 @@ Window {
|
||||
title: qsTr("Settings")
|
||||
height: 600
|
||||
width: 600
|
||||
property real temperature: 0.9
|
||||
property real topP: 0.9
|
||||
property int topK: 40
|
||||
property int maxLength: 4096
|
||||
property int promptBatchSize: 9
|
||||
property real defaultTemperature: 0.7
|
||||
property real defaultTopP: 0.95
|
||||
property int defaultTopK: 40
|
||||
property int defaultMaxLength: 4096
|
||||
property int defaultPromptBatchSize: 9
|
||||
|
||||
property string defaultPromptTemplate: "The prompt below is a question to answer, a task to complete, or a conversation to respond to; decide which and write an appropriate response.
|
||||
### Prompt:
|
||||
@@ -66,18 +66,23 @@ Window {
|
||||
### Response:\n"
|
||||
|
||||
property string promptTemplate: ""
|
||||
property real temperature: 0.0
|
||||
property real topP: 0.0
|
||||
property int topK: 0
|
||||
property int maxLength: 0
|
||||
property int promptBatchSize: 0
|
||||
|
||||
function restoreDefaults() {
|
||||
temperature = 0.9;
|
||||
topP = 0.9;
|
||||
topK = 40;
|
||||
maxLength = 4096;
|
||||
promptBatchSize = 9;
|
||||
temperature = defaultTemperature;
|
||||
topP = defaultTopP;
|
||||
topK = defaultTopK;
|
||||
maxLength = defaultMaxLength;
|
||||
promptBatchSize = defaultPromptBatchSize;
|
||||
promptTemplate = defaultPromptTemplate;
|
||||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
promptTemplate = defaultPromptTemplate;
|
||||
restoreDefaults();
|
||||
}
|
||||
|
||||
GridLayout {
|
||||
|
Loading…
Reference in New Issue
Block a user