diff --git a/llm.cpp b/llm.cpp
index d3c8cba7..1d904511 100644
--- a/llm.cpp
+++ b/llm.cpp
@@ -67,6 +67,7 @@ bool LLMObject::isModelLoaded() const
 void LLMObject::resetResponse()
 {
     s_ctx.n_past -= m_responseTokens;
+    s_ctx.n_past = std::max(0, s_ctx.n_past);
     s_ctx.logits.erase(s_ctx.logits.end() -= m_responseLogits, s_ctx.logits.end());
     m_responseTokens = 0;
     m_responseLogits = 0;
diff --git a/main.qml b/main.qml
index 2edba61d..04c03cc2 100644
--- a/main.qml
+++ b/main.qml
@@ -713,6 +713,9 @@ Window {
                     radius: 10
                 }
                 onAccepted: {
+                    if (textInput.text === "")
+                        return
+
                     LLM.stopGenerating()
                     if (chatModel.count) {