Mirror of https://github.com/nomic-ai/gpt4all, synced 2024-11-08 07:10:32 +00:00
Commit 01e582f15b

Limitations:
1) Context is not restored for gpt-j models.
2) When you switch between different model types in an existing chat, the context and the entire conversation are lost.
3) The settings are not chat- or conversation-specific.
4) The persisted chat files are very large because of how much data the llama.cpp backend tries to persist. We need to investigate how to shrink this.
73 lines · 2.3 KiB · C++
#include "chatlistmodel.h"
|
|
|
|
#include <QFile>
|
|
#include <QDataStream>
|
|
|
|
void ChatListModel::removeChatFile(Chat *chat) const
|
|
{
|
|
QSettings settings;
|
|
QFileInfo settingsInfo(settings.fileName());
|
|
QString settingsPath = settingsInfo.absolutePath();
|
|
QFile file(settingsPath + "/gpt4all-" + chat->id() + ".chat");
|
|
if (!file.exists())
|
|
return;
|
|
bool success = file.remove();
|
|
if (!success)
|
|
qWarning() << "ERROR: Couldn't remove chat file:" << file.fileName();
|
|
}
|
|
|
|
void ChatListModel::saveChats() const
|
|
{
|
|
QSettings settings;
|
|
QFileInfo settingsInfo(settings.fileName());
|
|
QString settingsPath = settingsInfo.absolutePath();
|
|
for (Chat *chat : m_chats) {
|
|
QFile file(settingsPath + "/gpt4all-" + chat->id() + ".chat");
|
|
bool success = file.open(QIODevice::WriteOnly);
|
|
if (!success) {
|
|
qWarning() << "ERROR: Couldn't save chat to file:" << file.fileName();
|
|
continue;
|
|
}
|
|
QDataStream out(&file);
|
|
if (!chat->serialize(out)) {
|
|
qWarning() << "ERROR: Couldn't serialize chat to file:" << file.fileName();
|
|
file.remove();
|
|
}
|
|
file.close();
|
|
}
|
|
}
|
|
|
|
void ChatListModel::restoreChats()
|
|
{
|
|
QSettings settings;
|
|
QFileInfo settingsInfo(settings.fileName());
|
|
QString settingsPath = settingsInfo.absolutePath();
|
|
QDir dir(settingsPath);
|
|
dir.setNameFilters(QStringList() << "gpt4all-*.chat");
|
|
QStringList fileNames = dir.entryList();
|
|
beginResetModel();
|
|
for (QString f : fileNames) {
|
|
QString filePath = settingsPath + "/" + f;
|
|
QFile file(filePath);
|
|
bool success = file.open(QIODevice::ReadOnly);
|
|
if (!success) {
|
|
qWarning() << "ERROR: Couldn't restore chat from file:" << file.fileName();
|
|
continue;
|
|
}
|
|
QDataStream in(&file);
|
|
Chat *chat = new Chat(this);
|
|
if (!chat->deserialize(in)) {
|
|
qWarning() << "ERROR: Couldn't deserialize chat from file:" << file.fileName();
|
|
file.remove();
|
|
} else {
|
|
connect(chat, &Chat::nameChanged, this, &ChatListModel::nameChanged);
|
|
m_chats.append(chat);
|
|
}
|
|
file.close();
|
|
}
|
|
std::sort(m_chats.begin(), m_chats.end(), [](const Chat* a, const Chat* b) {
|
|
return a->creationDate() > b->creationDate();
|
|
});
|
|
endResetModel();
|
|
}
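For reference, the model code above touches only a narrow slice of the Chat class. Below is a minimal sketch of what that slice might look like, inferred purely from the calls in this file; the real declaration lives elsewhere in the repository, and the creationDate type is an assumption.

#include <QDataStream>
#include <QObject>
#include <QString>

// Sketch of the Chat members that ChatListModel relies on; inferred from the
// usage above, not copied from the real header. Types noted below are assumptions.
class Chat : public QObject
{
    Q_OBJECT
public:
    explicit Chat(QObject *parent = nullptr);

    QString id() const;            // appears in the gpt4all-<id>.chat file name
    qint64 creationDate() const;   // assumption: some comparable timestamp, used for sorting

    // Both return false on failure, which is how the callers above detect bad files.
    bool serialize(QDataStream &stream) const;
    bool deserialize(QDataStream &stream);

signals:
    void nameChanged();            // forwarded by ChatListModel::restoreChats via connect()
};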
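On limitation 4 in the commit message above (very large persisted chat files), one generic option, shown purely as an illustrative sketch and not something this file does, would be to serialize into an in-memory buffer and run it through Qt's qCompress before writing. The helper name compressChatToFile and the on-disk format below are hypothetical.

#include <QBuffer>
#include <QByteArray>
#include <QDataStream>
#include <QFile>
#include <QString>

// Hypothetical helper: serialize a chat into a buffer, compress it, then write it.
// 'Chat' and 'serialize' are assumed to match the interface used in the file above.
bool compressChatToFile(Chat *chat, const QString &path)
{
    QByteArray raw;
    {
        QBuffer buffer(&raw);
        buffer.open(QIODevice::WriteOnly);
        QDataStream out(&buffer);
        if (!chat->serialize(out))
            return false;
    }

    QFile file(path);
    if (!file.open(QIODevice::WriteOnly))
        return false;
    // qCompress uses zlib; level 9 trades CPU time for the smallest output.
    file.write(qCompress(raw, 9));
    return true;
}

Restoring would mirror this: read the file, qUncompress the bytes, and feed the result to deserialize through a QBuffer. How much this actually helps depends on how compressible the llama.cpp context data is, which is exactly what the commit message says still needs investigation.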