#include "chat.h"
#include "chatlistmodel.h"
#include "mysettings.h"
#include "modellist.h"
#include "network.h"
#include "server.h"
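
// Regular chat constructor; the ChatLLM created here performs the actual model work.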
Chat::Chat(QObject *parent)
    : QObject(parent)
    , m_id(Network::globalInstance()->generateUniqueId())
    , m_name(tr("New Chat"))
    , m_chatModel(new ChatModel(this))
    , m_responseInProgress(false)
    , m_responseState(Chat::ResponseStopped)
    , m_creationDate(QDateTime::currentSecsSinceEpoch())
    , m_llmodel(new ChatLLM(this))
    , m_isServer(false)
    , m_shouldDeleteLater(false)
    , m_isModelLoaded(false)
    , m_shouldLoadModelWhenInstalled(false)
{
    connectLLM();
}
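
// Server chat constructor; m_llmodel is a Server instance (a ChatLLM subclass, see server.h)
// rather than a plain ChatLLM.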
Chat::Chat(bool isServer, QObject *parent)
    : QObject(parent)
    , m_id(Network::globalInstance()->generateUniqueId())
    , m_name(tr("Server Chat"))
    , m_chatModel(new ChatModel(this))
    , m_responseInProgress(false)
    , m_responseState(Chat::ResponseStopped)
    , m_creationDate(QDateTime::currentSecsSinceEpoch())
    , m_llmodel(new Server(this))
    , m_isServer(true)
    , m_shouldDeleteLater(false)
    , m_isModelLoaded(false)
    , m_shouldLoadModelWhenInstalled(false)
{
    connectLLM();
}

Chat::~Chat()
{
    delete m_llmodel;
    m_llmodel = nullptr;
}
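
// Wire up the ChatLLM. All connections are queued because the ChatLLM is expected to live
// in another thread; the signals emitted by this Chat are how work is requested from it.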
void Chat::connectLLM()
{
    // Should be in different threads
    connect(m_llmodel, &ChatLLM::isModelLoadedChanged, this, &Chat::handleModelLoadedChanged, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::responseChanged, this, &Chat::handleResponseChanged, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::promptProcessing, this, &Chat::promptProcessing, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::responseStopped, this, &Chat::responseStopped, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::modelLoadingError, this, &Chat::handleModelLoadingError, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::recalcChanged, this, &Chat::handleRecalculating, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::generatedNameChanged, this, &Chat::generatedNameChanged, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::reportSpeed, this, &Chat::handleTokenSpeedChanged, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::databaseResultsChanged, this, &Chat::handleDatabaseResultsChanged, Qt::QueuedConnection);
    connect(m_llmodel, &ChatLLM::modelInfoChanged, this, &Chat::handleModelInfoChanged, Qt::QueuedConnection);

    connect(this, &Chat::promptRequested, m_llmodel, &ChatLLM::prompt, Qt::QueuedConnection);
    connect(this, &Chat::modelChangeRequested, m_llmodel, &ChatLLM::modelChangeRequested, Qt::QueuedConnection);
    connect(this, &Chat::loadDefaultModelRequested, m_llmodel, &ChatLLM::loadDefaultModel, Qt::QueuedConnection);
    connect(this, &Chat::loadModelRequested, m_llmodel, &ChatLLM::loadModel, Qt::QueuedConnection);
    connect(this, &Chat::generateNameRequested, m_llmodel, &ChatLLM::generateName, Qt::QueuedConnection);
    connect(this, &Chat::regenerateResponseRequested, m_llmodel, &ChatLLM::regenerateResponse, Qt::QueuedConnection);
    connect(this, &Chat::resetResponseRequested, m_llmodel, &ChatLLM::resetResponse, Qt::QueuedConnection);
    connect(this, &Chat::resetContextRequested, m_llmodel, &ChatLLM::resetContext, Qt::QueuedConnection);
    connect(this, &Chat::processSystemPromptRequested, m_llmodel, &ChatLLM::processSystemPrompt, Qt::QueuedConnection);

    connect(ModelList::globalInstance()->installedModels(), &InstalledModels::countChanged,
        this, &Chat::handleModelInstalled, Qt::QueuedConnection);
}
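
// Completely resets the chat: discards the on-disk file, the context, and the conversation,
// and assigns a fresh id.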
void Chat::reset()
{
    stopGenerating();
    // Erase our current on-disk representation as we're completely resetting the chat along with its id
    ChatListModel::globalInstance()->removeChatFile(this);
    emit resetContextRequested();
    m_id = Network::globalInstance()->generateUniqueId();
    emit idChanged(m_id);
    // NOTE: We deliberately do not reset the name or creation date to indicate that this was originally
    // an older chat that was reset for another purpose. Resetting this data would lead to the chat
    // name label changing back to 'New Chat' and showing up in the chat model list as a 'New Chat'
    // further down in the list. This might surprise the user. In the future, we might get rid of
    // the "reset context" button in the UI. Right now, changing the model in the combobox dropdown
    // effectively does a reset context. We *have* to do this right now when switching between different
    // types of models. The only way to get rid of that would be a very long recalculation where we
    // rebuild the context when switching between different types of models. Probably the right way to
    // fix this is to allow switching models but to throw up a dialog warning users that switching
    // between types of models will incur a long recalculation.
    m_chatModel->clear();
}
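
// Queued request; ChatLLM::processSystemPrompt runs asynchronously on the worker's own thread.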
void Chat::processSystemPrompt()
{
    emit processSystemPromptRequested();
}

bool Chat::isModelLoaded() const
{
    return m_isModelLoaded;
}
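
// Clears the token speed and marks a response as in progress, starting in the
// LocalDocsRetrieval state; does nothing if a retrieval is already underway.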
void Chat::resetResponseState()
{
    if (m_responseInProgress && m_responseState == Chat::LocalDocsRetrieval)
        return;

    m_tokenSpeed = QString();
    emit tokenSpeedChanged();
    m_responseInProgress = true;
    m_responseState = Chat::LocalDocsRetrieval;
    emit responseInProgressChanged();
    emit responseStateChanged();
}
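
// Entry point for a user prompt: forwards the prompt together with the enabled
// localdocs collections to the ChatLLM via a queued signal.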
void Chat::prompt(const QString &prompt)
{
    resetResponseState();
    emit promptRequested(m_collections, prompt);
}
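
// Clears the references on the last message before asking the ChatLLM to regenerate it.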
void Chat::regenerateResponse()
{
    const int index = m_chatModel->count() - 1;
    m_chatModel->updateReferences(index, QString(), QList<QString>());
    emit regenerateResponseRequested();
}

void Chat::stopGenerating()
{
    m_llmodel->stopGenerating();
}
QString Chat::response() const
{
    return m_response;
}
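
// Human-readable description of the current response state, presumably for display in the UI.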
QString Chat::responseState() const
{
    switch (m_responseState) {
    case ResponseStopped: return QStringLiteral("response stopped");
    case LocalDocsRetrieval: return QStringLiteral("retrieving ") + m_collections.join(", ");
    case LocalDocsProcessing: return QStringLiteral("processing ") + m_collections.join(", ");
    case PromptProcessing: return QStringLiteral("processing");
    case ResponseGeneration: return QStringLiteral("generating response");
    }
    Q_UNREACHABLE();
    return QString();
}
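
// Streams partial responses from the ChatLLM into the last item of the chat model.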
void Chat::handleResponseChanged(const QString &response)
{
    if (m_responseState != Chat::ResponseGeneration) {
        m_responseState = Chat::ResponseGeneration;
        emit responseStateChanged();
    }

    m_response = response;
    const int index = m_chatModel->count() - 1;
    m_chatModel->updateValue(index, this->response());
    emit responseChanged();
}
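
// Also completes a pending unloadAndDeleteLater() once the model has finished unloading.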
void Chat::handleModelLoadedChanged(bool loaded)
{
    if (m_shouldDeleteLater)
        deleteLater();

    if (loaded == m_isModelLoaded)
        return;

    m_isModelLoaded = loaded;
    emit isModelLoadedChanged();
}

void Chat::promptProcessing()
{
    m_responseState = !databaseResults().isEmpty() ? Chat::LocalDocsProcessing : Chat::PromptProcessing;
    emit responseStateChanged();
}
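
// Called when generation finishes: appends a markdown-formatted list of localdocs
// references to the last message (if enabled), then requests a generated chat name
// the first time around.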
void Chat::responseStopped()
{
    m_tokenSpeed = QString();
    emit tokenSpeedChanged();

    if (MySettings::globalInstance()->localDocsShowReferences()) {
        const QString chatResponse = response();
        QList<QString> references;
        QList<QString> referencesContext;
        int validReferenceNumber = 1;
        for (const ResultInfo &info : databaseResults()) {
            if (info.file.isEmpty())
                continue;
            if (validReferenceNumber == 1)
                references.append((!chatResponse.endsWith("\n") ? "\n" : QString()) + QStringLiteral("\n---"));
            QString reference;
            {
                QTextStream stream(&reference);
                stream << (validReferenceNumber++) << ". ";
                if (!info.title.isEmpty())
                    stream << "\"" << info.title << "\". ";
                if (!info.author.isEmpty())
                    stream << "By " << info.author << ". ";
                if (!info.date.isEmpty())
                    stream << "Date: " << info.date << ". ";
                stream << "In " << info.file << ". ";
                if (info.page != -1)
                    stream << "Page " << info.page << ". ";
                if (info.from != -1) {
                    stream << "Lines " << info.from;
                    if (info.to != -1)
                        stream << "-" << info.to;
                    stream << ". ";
                }
                stream << "[Context](context://" << validReferenceNumber - 1 << ")";
            }
            references.append(reference);
            referencesContext.append(info.text);
        }

        const int index = m_chatModel->count() - 1;
        m_chatModel->updateReferences(index, references.join("\n"), referencesContext);
        emit responseChanged();
    }

    m_responseInProgress = false;
    m_responseState = Chat::ResponseStopped;
    emit responseInProgressChanged();
    emit responseStateChanged();
    if (m_generatedName.isEmpty())
        emit generateNameRequested();
    if (chatModel()->count() < 3)
        Network::globalInstance()->sendChatStarted();
}
ModelInfo Chat::modelInfo() const
{
    return m_modelInfo;
}
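
// Switching models invalidates the loaded state and any previous loading error, then
// asks the ChatLLM to change models asynchronously.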
void Chat::setModelInfo(const ModelInfo &modelInfo)
{
    if (m_modelInfo == modelInfo)
        return;

    m_isModelLoaded = false;
    emit isModelLoadedChanged();
    m_modelLoadingError = QString();
    emit modelLoadingErrorChanged();
    m_modelInfo = modelInfo;
    emit modelInfoChanged();
    emit modelChangeRequested(modelInfo);
}
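
// Appends a new prompt/response pair to the chat model and resets the in-progress response.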
void Chat::newPromptResponsePair(const QString &prompt)
{
    resetResponseState();
    m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
    m_chatModel->appendPrompt(tr("Prompt: "), prompt);
    m_chatModel->appendResponse(tr("Response: "), prompt);
    emit resetResponseRequested();
}
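
// Variant used by the built-in server; unlike the UI path it does not request a response reset.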
void Chat::serverNewPromptResponsePair(const QString &prompt)
{
    resetResponseState();
    m_chatModel->updateCurrentResponse(m_chatModel->count() - 1, false);
    m_chatModel->appendPrompt(tr("Prompt: "), prompt);
    m_chatModel->appendResponse(tr("Response: "), prompt);
}
bool Chat::isRecalc() const
{
    return m_llmodel->isRecalc();
}
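
// Deletes the chat once its model is unloaded; deletes immediately if no model is loaded.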
void Chat::unloadAndDeleteLater()
{
    if (!isModelLoaded()) {
        deleteLater();
        return;
    }

    m_shouldDeleteLater = true;
    unloadModel();
}

void Chat::unloadModel()
{
    stopGenerating();
    m_llmodel->setShouldBeLoaded(false);
}
void Chat::reloadModel()
{
    // If the installed model list is empty, then we set a special flag and monitor for when
    // a model is installed
    if (!ModelList::globalInstance()->installedModels()->count()) {
        m_shouldLoadModelWhenInstalled = true;
        return;
    }
    m_llmodel->setShouldBeLoaded(true);
}

void Chat::handleModelInstalled()
{
    if (!m_shouldLoadModelWhenInstalled)
        return;
    m_shouldLoadModelWhenInstalled = false;
    reloadModel();
}
void Chat::generatedNameChanged(const QString &name)
{
    // Use at most the first three words, and remove newlines and extra spaces
    m_generatedName = name.simplified();
    QStringList words = m_generatedName.split(' ', Qt::SkipEmptyParts);
    int wordCount = qMin(3, words.size());
    m_name = words.mid(0, wordCount).join(' ');
    emit nameChanged();
}
void Chat::handleRecalculating()
{
    Network::globalInstance()->sendRecalculatingContext(m_chatModel->count());
    emit recalcChanged();
}

void Chat::handleModelLoadingError(const QString &error)
{
    qWarning() << "ERROR:" << qPrintable(error) << "id" << id();
    m_modelLoadingError = error;
    emit modelLoadingErrorChanged();
}

void Chat::handleTokenSpeedChanged(const QString &tokenSpeed)
{
    m_tokenSpeed = tokenSpeed;
    emit tokenSpeedChanged();
}
void Chat::handleDatabaseResultsChanged(const QList<ResultInfo> &results)
{
    m_databaseResults = results;
}

void Chat::handleModelInfoChanged(const ModelInfo &modelInfo)
{
    if (m_modelInfo == modelInfo)
        return;

    m_modelInfo = modelInfo;
    emit modelInfoChanged();
}
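
// Versioned on-disk format: identity fields first, then the model reference (unique id
// from version 5 on, filename before that), optional collections (version 3+), then the
// ChatLLM and ChatModel payloads.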
bool Chat::serialize(QDataStream &stream, int version) const
{
    stream << m_creationDate;
    stream << m_id;
    stream << m_name;
    stream << m_userName;
    if (version > 4)
        stream << m_modelInfo.id();
    else
        stream << m_modelInfo.filename();
    if (version > 2)
        stream << m_collections;
    if (!m_llmodel->serialize(stream, version))
        return false;
    if (!m_chatModel->serialize(stream, version))
        return false;
    return stream.status() == QDataStream::Ok;
}
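
// Mirror of serialize(); fails if the referenced model is no longer known to the ModelList.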
bool Chat::deserialize(QDataStream &stream, int version)
{
    stream >> m_creationDate;
    stream >> m_id;
    emit idChanged(m_id);
    stream >> m_name;
    stream >> m_userName;
    emit nameChanged();

    QString modelId;
    stream >> modelId;
    if (version > 4) {
        if (!ModelList::globalInstance()->contains(modelId))
            return false;
        m_modelInfo = ModelList::globalInstance()->modelInfo(modelId);
    } else {
        if (!ModelList::globalInstance()->containsByFilename(modelId))
            return false;
        m_modelInfo = ModelList::globalInstance()->modelInfoByFilename(modelId);
    }
    emit modelInfoChanged();

    // Prior to version 2, gptj models had a bug that fixed the kv_cache to F32 instead of F16,
    // so unfortunately we cannot deserialize these
    if (version < 2 && m_modelInfo.filename().contains("gpt4all-j"))
        return false;
    if (version > 2) {
        stream >> m_collections;
        emit collectionListChanged(m_collections);
    }
    m_llmodel->setModelInfo(m_modelInfo);
    if (!m_llmodel->deserialize(stream, version))
        return false;
    if (!m_chatModel->deserialize(stream, version))
        return false;
    emit chatModelChanged();
    return stream.status() == QDataStream::Ok;
}
QList<QString> Chat::collectionList() const
{
    return m_collections;
}

bool Chat::hasCollection(const QString &collection) const
{
    return m_collections.contains(collection);
}

void Chat::addCollection(const QString &collection)
{
    if (hasCollection(collection))
        return;

    m_collections.append(collection);
    emit collectionListChanged(m_collections);
}

void Chat::removeCollection(const QString &collection)
{
    if (!hasCollection(collection))
        return;

    m_collections.removeAll(collection);
    emit collectionListChanged(m_collections);
}