Mirror of https://github.com/nomic-ai/gpt4all, synced 2024-11-06 09:20:33 +00:00.
Cleanup the chatllm properly.
This commit is contained in:
parent
adabc20a00
commit
e781aaa4c5
@ -11,6 +11,17 @@ Chat::Chat(QObject *parent)
|
||||
, m_responseInProgress(false)
|
||||
, m_creationDate(QDateTime::currentSecsSinceEpoch())
|
||||
, m_llmodel(new ChatLLM(this))
|
||||
{
|
||||
connectLLM();
|
||||
}
|
||||
|
||||
/// Destructor: tears down the owned ChatLLM worker.
/// The model is deleted explicitly (rather than relying on QObject
/// parent/child cleanup) and the pointer is nulled so any late access
/// during destruction fails loudly instead of touching freed memory.
Chat::~Chat()
{
    delete m_llmodel;
    m_llmodel = nullptr;
}
|
||||
|
||||
void Chat::connectLLM()
|
||||
{
|
||||
// Should be in same thread
|
||||
connect(Download::globalInstance(), &Download::modelListChanged, this, &Chat::modelListChanged, Qt::DirectConnection);
|
||||
|
@ -25,6 +25,8 @@ class Chat : public QObject
|
||||
|
||||
public:
|
||||
explicit Chat(QObject *parent = nullptr);
|
||||
virtual ~Chat();
|
||||
void connectLLM();
|
||||
|
||||
QString id() const { return m_id; }
|
||||
QString name() const { return m_userName.isEmpty() ? m_name : m_userName; }
|
||||
|
@ -53,6 +53,13 @@ ChatLLM::ChatLLM(Chat *parent)
|
||||
m_llmThread.start();
|
||||
}
|
||||
|
||||
/// Destructor: shuts down the worker thread, then frees the model.
/// Order matters here — quit() asks the thread's event loop to exit and
/// wait() blocks until it has fully stopped, so the model cannot still be
/// in use on the worker thread when it is deleted below.
ChatLLM::~ChatLLM()
{
    m_llmThread.quit();
    m_llmThread.wait();
    delete m_llmodel;
}
|
||||
|
||||
bool ChatLLM::loadDefaultModel()
|
||||
{
|
||||
const QList<QString> models = m_chat->modelList();
|
||||
|
@ -24,6 +24,7 @@ public:
|
||||
};
|
||||
|
||||
ChatLLM(Chat *parent);
|
||||
virtual ~ChatLLM();
|
||||
|
||||
bool isModelLoaded() const;
|
||||
void regenerateResponse();
|
||||
|
Loading…
Reference in New Issue
Block a user