From 67099f80ba3fcd68c32a215b76c5d3866d142bc0 Mon Sep 17 00:00:00 2001
From: Adam Treat <treat.adam@gmail.com>
Date: Wed, 21 Feb 2024 09:54:27 -0500
Subject: [PATCH] Add comment to make this clear.

Signed-off-by: Adam Treat <treat.adam@gmail.com>
---
 gpt4all-chat/chat.cpp    | 2 +-
 gpt4all-chat/chatllm.cpp | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/gpt4all-chat/chat.cpp b/gpt4all-chat/chat.cpp
index 8730adbc..62c33a1a 100644
--- a/gpt4all-chat/chat.cpp
+++ b/gpt4all-chat/chat.cpp
@@ -243,7 +243,7 @@ void Chat::setModelInfo(const ModelInfo &modelInfo)
     if (m_modelInfo == modelInfo && isModelLoaded())
         return;
 
-    m_modelLoadingPercentage = std::numeric_limits<float>::min();
+    m_modelLoadingPercentage = std::numeric_limits<float>::min(); // small non-zero positive value
     emit isModelLoadedChanged();
     m_modelLoadingError = QString();
     emit modelLoadingErrorChanged();
diff --git a/gpt4all-chat/chatllm.cpp b/gpt4all-chat/chatllm.cpp
index 4b456e34..bf3f6253 100644
--- a/gpt4all-chat/chatllm.cpp
+++ b/gpt4all-chat/chatllm.cpp
@@ -222,7 +222,7 @@ bool ChatLLM::loadModel(const ModelInfo &modelInfo)
 #endif
         delete m_llModelInfo.model;
         m_llModelInfo.model = nullptr;
-        emit modelLoadingPercentageChanged(std::numeric_limits<float>::min());
+        emit modelLoadingPercentageChanged(std::numeric_limits<float>::min()); // small non-zero positive value
     } else if (!m_isServer) {
         // This is a blocking call that tries to retrieve the model we need from the model store.
        // If it succeeds, then we just have to restore state. If the store has never had a model