From 27599d4a0ace553670fd28d567a97e63b4665d3f Mon Sep 17 00:00:00 2001
From: Adam Treat
Date: Tue, 9 May 2023 23:43:16 -0400
Subject: [PATCH] Fix some usage events.

---
 chat.cpp    | 6 +++---
 chatllm.cpp | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/chat.cpp b/chat.cpp
index c1147bfa..2a6b941f 100644
--- a/chat.cpp
+++ b/chat.cpp
@@ -107,10 +107,10 @@ void Chat::responseStopped()
 {
     m_responseInProgress = false;
     emit responseInProgressChanged();
-    if (m_llmodel->generatedName().isEmpty()) {
-        Network::globalInstance()->sendChatStarted();
+    if (m_llmodel->generatedName().isEmpty())
         emit generateNameRequested();
-    }
+    if (chatModel()->count() < 3)
+        Network::globalInstance()->sendChatStarted();
 }
 
 QString Chat::modelName() const

diff --git a/chatllm.cpp b/chatllm.cpp
index 7e21ecff..2ffbc3c7 100644
--- a/chatllm.cpp
+++ b/chatllm.cpp
@@ -124,7 +124,7 @@ bool ChatLLM::loadModel(const QString &modelName)
 
     emit isModelLoadedChanged();
 
-    static bool isFirstLoad = false;
+    static bool isFirstLoad = true;
     if (isFirstLoad) {
         emit sendStartup();
         isFirstLoad = false;