From 659ab13665173b20e94bebea2c76d7945fe50d0d Mon Sep 17 00:00:00 2001
From: Adam Treat
Date: Sun, 16 Apr 2023 14:57:58 -0400
Subject: [PATCH] Don't allow empty prompts. Context past always equal or
 greater than zero.

---
 llm.cpp  | 1 +
 main.qml | 3 +++
 2 files changed, 4 insertions(+)

diff --git a/llm.cpp b/llm.cpp
index d3c8cba7..1d904511 100644
--- a/llm.cpp
+++ b/llm.cpp
@@ -67,6 +67,7 @@ bool LLMObject::isModelLoaded() const
 void LLMObject::resetResponse()
 {
     s_ctx.n_past -= m_responseTokens;
+    s_ctx.n_past = std::max(0, s_ctx.n_past);
     s_ctx.logits.erase(s_ctx.logits.end() -= m_responseLogits, s_ctx.logits.end());
     m_responseTokens = 0;
     m_responseLogits = 0;
diff --git a/main.qml b/main.qml
index 2edba61d..04c03cc2 100644
--- a/main.qml
+++ b/main.qml
@@ -713,6 +713,9 @@ Window {
                 radius: 10
             }
             onAccepted: {
+                if (textInput.text === "")
+                    return
+
                 LLM.stopGenerating()
                 if (chatModel.count) {