Better error handling when the model fails to load.

pull/913/head
Adam Treat, 1 year ago
parent b5971b0d41
commit a84bcccb3a
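In outline, the commit routes model-load failures from the worker object out to the QML front end instead of only logging them: ChatLLM emits modelLoadingError, Chat caches the string and exposes it as a notifiable property, and the UI reacts to the notify signal. A minimal sketch of that pattern, assuming Qt; the class, signal, slot, and property names mirror the diff below, everything else is simplified stand-in code rather than the real sources:

#include <QObject>
#include <QString>
#include <QtDebug>

// Worker-side object: reports failures through a signal rather than a qWarning() alone.
class ChatLLM : public QObject {
    Q_OBJECT
Q_SIGNALS:
    void modelLoadingError(const QString &error);
};

// GUI-side object: caches the error and exposes it to QML as a property.
class Chat : public QObject {
    Q_OBJECT
    Q_PROPERTY(QString modelLoadingError READ modelLoadingError NOTIFY modelLoadingErrorChanged)
public:
    QString modelLoadingError() const { return m_modelLoadingError; }
Q_SIGNALS:
    void modelLoadingErrorChanged();
private Q_SLOTS:
    // Connected to ChatLLM::modelLoadingError with Qt::QueuedConnection in the
    // diff below, so the hand-off is safe across threads.
    void handleModelLoadingError(const QString &error)
    {
        qWarning() << "ERROR:" << qPrintable(error); // still logged, as in the diff
        m_modelLoadingError = error;                 // cache for the property getter
        emit modelLoadingErrorChanged();             // QML reacts and opens the popup
    }
private:
    QString m_modelLoadingError;
};

Each request that starts a new load (setModelName, loadModel, loadDefaultModel) clears the cached string and re-emits the notify signal first, so a stale error from a previous attempt does not linger in the UI.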

chat.cpp
@@ -3,6 +3,7 @@
 #include "localdocs.h"
 #include "network.h"
 #include "download.h"
+#include "server.h"
 
 Chat::Chat(QObject *parent)
     : QObject(parent)
@@ -53,7 +54,7 @@ void Chat::connectLLM()
     connect(m_llmodel, &ChatLLM::promptProcessing, this, &Chat::promptProcessing, Qt::QueuedConnection);
     connect(m_llmodel, &ChatLLM::responseStopped, this, &Chat::responseStopped, Qt::QueuedConnection);
     connect(m_llmodel, &ChatLLM::modelNameChanged, this, &Chat::handleModelNameChanged, Qt::QueuedConnection);
-    connect(m_llmodel, &ChatLLM::modelLoadingError, this, &Chat::modelLoadingError, Qt::QueuedConnection);
+    connect(m_llmodel, &ChatLLM::modelLoadingError, this, &Chat::handleModelLoadingError, Qt::QueuedConnection);
     connect(m_llmodel, &ChatLLM::recalcChanged, this, &Chat::handleRecalculating, Qt::QueuedConnection);
     connect(m_llmodel, &ChatLLM::generatedNameChanged, this, &Chat::generatedNameChanged, Qt::QueuedConnection);
@@ -243,6 +244,8 @@ void Chat::setModelName(const QString &modelName)
 {
     // doesn't block but will unload old model and load new one which the gui can see through changes
     // to the isModelLoaded property
+    m_modelLoadingError = QString();
+    emit modelLoadingErrorChanged();
     emit modelNameChangeRequested(modelName);
 }
@@ -270,11 +273,15 @@ bool Chat::isRecalc() const
 
 void Chat::loadDefaultModel()
 {
+    m_modelLoadingError = QString();
+    emit modelLoadingErrorChanged();
     emit loadDefaultModelRequested();
 }
 
 void Chat::loadModel(const QString &modelName)
 {
+    m_modelLoadingError = QString();
+    emit modelLoadingErrorChanged();
     emit loadModelRequested(modelName);
 }
@@ -322,6 +329,13 @@ void Chat::handleModelNameChanged()
     emit modelNameChanged();
 }
 
+void Chat::handleModelLoadingError(const QString &error)
+{
+    qWarning() << "ERROR:" << qPrintable(error) << "id" << id();
+    m_modelLoadingError = error;
+    emit modelLoadingErrorChanged();
+}
+
 bool Chat::serialize(QDataStream &stream, int version) const
 {
     stream << m_creationDate;

chat.h
@@ -8,7 +8,6 @@
 #include "chatllm.h"
 #include "chatmodel.h"
 #include "database.h"
-#include "server.h"
 
 class Chat : public QObject
 {
@@ -25,6 +24,7 @@ class Chat : public QObject
     Q_PROPERTY(bool isServer READ isServer NOTIFY isServerChanged)
     Q_PROPERTY(QString responseState READ responseState NOTIFY responseStateChanged)
     Q_PROPERTY(QList<QString> collectionList READ collectionList NOTIFY collectionListChanged)
+    Q_PROPERTY(QString modelLoadingError READ modelLoadingError NOTIFY modelLoadingErrorChanged)
     QML_ELEMENT
     QML_UNCREATABLE("Only creatable from c++!")
@@ -89,6 +89,8 @@ public:
     Q_INVOKABLE void removeCollection(const QString &collection);
     void resetResponseState();
+    QString modelLoadingError() const { return m_modelLoadingError; }
+
 public Q_SLOTS:
     void serverNewPromptResponsePair(const QString &prompt);
@@ -113,7 +115,7 @@ Q_SIGNALS:
     void loadModelRequested(const QString &modelName);
     void generateNameRequested();
     void modelListChanged();
-    void modelLoadingError(const QString &error);
+    void modelLoadingErrorChanged();
     void isServerChanged();
     void collectionListChanged();
@@ -125,12 +127,14 @@ private Q_SLOTS:
     void generatedNameChanged();
     void handleRecalculating();
     void handleModelNameChanged();
+    void handleModelLoadingError(const QString &error);
 
 private:
     QString m_id;
     QString m_name;
     QString m_userName;
     QString m_savedModelName;
+    QString m_modelLoadingError;
     QList<QString> m_collections;
     ChatModel *m_chatModel;
     bool m_responseInProgress;

chatlistmodel.cpp
@@ -233,7 +233,7 @@ void ChatListModel::restoreChat(Chat *chat)
 {
     chat->setParent(this);
     connect(chat, &Chat::nameChanged, this, &ChatListModel::nameChanged);
-    connect(chat, &Chat::modelLoadingError, this, &ChatListModel::handleModelLoadingError);
+    connect(chat, &Chat::modelLoadingErrorChanged, this, &ChatListModel::handleModelLoadingError);
 
     if (m_dummyChat) {
         beginResetModel();

chatlistmodel.h
@@ -227,10 +227,9 @@ private Q_SLOTS:
         emit dataChanged(index, index, {NameRole});
     }
 
-    void handleModelLoadingError(const QString &error)
+    void handleModelLoadingError()
     {
         Chat *chat = qobject_cast<Chat *>(sender());
-        qWarning() << "ERROR:" << qPrintable(error) << "id" << chat->id();
         removeChat(chat);
     }

chatllm.cpp
@@ -228,6 +228,11 @@ bool ChatLLM::loadModel(const QString &modelName)
             case 'M': m_modelType = LLModelType::MPT_; break;
             default: delete std::exchange(m_modelInfo.model, nullptr);
             }
+        } else {
+            if (!m_isServer)
+                LLModelStore::globalInstance()->releaseModel(m_modelInfo); // release back into the store
+            m_modelInfo = LLModelInfo();
+            emit modelLoadingError(QString("Could not load model due to invalid format for %1").arg(modelName));
         }
     }
 #if defined(DEBUG_MODEL_LOADING)
@@ -249,8 +254,8 @@ bool ChatLLM::loadModel(const QString &modelName)
     } else {
         if (!m_isServer)
             LLModelStore::globalInstance()->releaseModel(m_modelInfo); // release back into the store
-        const QString error = QString("Could not find model %1").arg(modelName);
-        emit modelLoadingError(error);
+        m_modelInfo = LLModelInfo();
+        emit modelLoadingError(QString("Could not find file for model %1").arg(modelName));
     }
 
     if (m_modelInfo.model) {
@@ -342,8 +347,7 @@ void ChatLLM::setModelName(const QString &modelName)
 
 void ChatLLM::modelNameChangeRequested(const QString &modelName)
 {
-    if (!loadModel(modelName))
-        qWarning() << "ERROR: Could not load model" << modelName;
+    loadModel(modelName);
 }
 
 bool ChatLLM::handlePrompt(int32_t token)

main.qml
@@ -62,6 +62,10 @@ Window {
             if (Network.isActive && !currentChat.responseInProgress)
                 Network.sendConversation(currentChat.id, getConversationJson());
         }
+        function onModelLoadingErrorChanged() {
+            if (currentChat.modelLoadingError !== "")
+                modelLoadingErrorPopup.open()
+        }
     }
 
     function startupDialogs() {
function startupDialogs() {
@@ -116,6 +120,13 @@ Window {
         Accessible.name: title
     }
 
+    PopupDialog {
+        id: modelLoadingErrorPopup
+        anchors.centerIn: parent
+        shouldTimeOut: false
+        text: currentChat.modelLoadingError
+    }
+
     Rectangle {
         id: header
         anchors.left: parent.left
@@ -126,7 +137,7 @@ Window {
         Item {
             anchors.centerIn: parent
             height: childrenRect.height
-            visible: currentChat.isModelLoaded || currentChat.isServer
+            visible: currentChat.isModelLoaded || currentChat.modelLoadingError !== "" || currentChat.isServer
 
             Label {
                 id: modelLabel
@@ -150,6 +161,31 @@ Window {
                 anchors.horizontalCenterOffset: window.width >= 950 ? 0 : Math.max(-((950 - window.width) / 2), -99.5)
                 enabled: !currentChat.isServer
                 model: currentChat.modelList
+                contentItem: Text {
+                    anchors.horizontalCenter: parent.horizontalCenter
+                    leftPadding: 10
+                    rightPadding: 20
+                    text: currentChat.modelLoadingError !== "" ? "Model loading error..." : comboBox.displayText
+                    font: comboBox.font
+                    color: theme.textColor
+                    verticalAlignment: Text.AlignVCenter
+                    horizontalAlignment: Text.AlignHCenter
+                    elide: Text.ElideRight
+                }
+                delegate: ItemDelegate {
+                    width: comboBox.width
+                    contentItem: Text {
+                        text: modelData
+                        color: theme.textColor
+                        font: comboBox.font
+                        elide: Text.ElideRight
+                        verticalAlignment: Text.AlignVCenter
+                    }
+                    background: Rectangle {
+                        color: highlighted ? theme.backgroundLight : theme.backgroundDark
+                    }
+                    highlighted: comboBox.highlightedIndex === index
+                }
                 Accessible.role: Accessible.ComboBox
                 Accessible.name: qsTr("ComboBox for displaying/picking the current model")
                 Accessible.description: qsTr("Use this for picking the current model to use; the first item is the current model")
@@ -163,7 +199,7 @@ Window {
         Item {
             anchors.centerIn: parent
-            visible: !currentChat.isModelLoaded && !currentChat.isServer
+            visible: !currentChat.isModelLoaded && currentChat.modelLoadingError === "" && !currentChat.isServer
             width: childrenRect.width
             height: childrenRect.height
 
             Row {
