Mirror of https://github.com/nomic-ai/gpt4all, synced 2024-11-08 07:10:32 +00:00
Trim trailing whitespace at the end of generation.
parent 9381a69b2b
commit 4bf4b2a080
1 changed file: llm.cpp (18 additions, 0 deletions)
@@ -88,6 +88,18 @@ std::string remove_leading_whitespace(const std::string& input) {
     return std::string(first_non_whitespace, input.end());
 }
 
+std::string trim_whitespace(const std::string& input) {
+    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
+        return !std::isspace(c);
+    });
+
+    auto last_non_whitespace = std::find_if(input.rbegin(), input.rend(), [](unsigned char c) {
+        return !std::isspace(c);
+    }).base();
+
+    return std::string(first_non_whitespace, last_non_whitespace);
+}
+
 QString LLMObject::response() const
 {
     return QString::fromStdString(remove_leading_whitespace(m_response));
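For context (this block is not part of the commit), here is a minimal standalone sketch of how the new trim_whitespace helper behaves; the headers, main(), and the sample string are illustrative additions:

#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>

// Same logic as the helper added above: strip leading and trailing
// whitespace while leaving interior whitespace untouched.
std::string trim_whitespace(const std::string& input) {
    auto first_non_whitespace = std::find_if(input.begin(), input.end(), [](unsigned char c) {
        return !std::isspace(c);
    });
    auto last_non_whitespace = std::find_if(input.rbegin(), input.rend(), [](unsigned char c) {
        return !std::isspace(c);
    }).base();
    return std::string(first_non_whitespace, last_non_whitespace);
}

int main() {
    // Prints "[hello world]": leading tab/spaces and trailing newlines are removed.
    std::cout << '[' << trim_whitespace(" \thello world \n\n") << "]\n";
}

The reverse-iterator search plus .base() yields the iterator one past the last non-whitespace character. Note that an input consisting entirely of whitespace would leave first_non_whitespace past last_non_whitespace, so the helper assumes the response contains at least one visible character.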
@@ -132,7 +144,13 @@ bool LLMObject::prompt(const QString &prompt, const QString &prompt_template, in
     qInfo() << instructPrompt << "\n";
     m_llmodel->prompt(instructPrompt.toStdString(), func, s_ctx, n_predict, top_k, top_p, temp, n_batch);
     m_responseLogits += s_ctx.logits.size() - logitsBefore;
+    std::string trimmed = trim_whitespace(m_response);
+    if (trimmed != m_response) {
+        m_response = trimmed;
+        emit responseChanged();
+    }
     emit responseStopped();
 
     return true;
 }
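The second hunk follows a familiar Qt pattern: once generation stops, trim the accumulated response and emit responseChanged() only if trimming actually altered it, so the view is not updated for a no-op. A rough non-Qt sketch of that pattern (ResponseHolder, onGenerationStopped(), and the std::function callback are illustrative stand-ins for LLMObject, the end of prompt(), and the Qt signal; the guard for all-whitespace input is an extra precaution, not something the committed helper does):

#include <algorithm>
#include <cctype>
#include <functional>
#include <iostream>
#include <string>

// Minimal stand-in for the trim_whitespace helper added in this commit.
static std::string trim_whitespace(const std::string& input) {
    auto first = std::find_if(input.begin(), input.end(),
                              [](unsigned char c) { return !std::isspace(c); });
    auto last = std::find_if(input.rbegin(), input.rend(),
                             [](unsigned char c) { return !std::isspace(c); }).base();
    return (first < last) ? std::string(first, last) : std::string();
}

// Stand-in for LLMObject: stores the accumulated response and notifies a
// listener only when trimming actually changed the stored value.
struct ResponseHolder {
    std::string m_response;
    std::function<void()> responseChanged; // stand-in for the Qt signal

    void onGenerationStopped() {
        std::string trimmed = trim_whitespace(m_response);
        if (trimmed != m_response) {       // skip a redundant UI update
            m_response = trimmed;
            if (responseChanged) responseChanged();
        }
    }
};

int main() {
    ResponseHolder h;
    h.responseChanged = [] { std::cout << "responseChanged emitted\n"; };
    h.m_response = "Hello there.\n\n";
    h.onGenerationStopped();  // trailing newlines removed -> callback fires
    h.onGenerationStopped();  // already trimmed -> no callback
    std::cout << '[' << h.m_response << "]\n";
}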