#ifndef CHATLLM_H
#define CHATLLM_H

#include <QObject>
#include <QThread>

#include <atomic>
#include <cstdint>
#include <string>

#include "../gpt4all-backend/llmodel.h"
class Chat;
class ChatLLM : public QObject
|
|
|
|
{
|
|
|
|
Q_OBJECT
|
|
|
|
Q_PROPERTY(bool isModelLoaded READ isModelLoaded NOTIFY isModelLoadedChanged)
|
|
|
|
Q_PROPERTY(QString response READ response NOTIFY responseChanged)
|
|
|
|
Q_PROPERTY(QString modelName READ modelName WRITE setModelName NOTIFY modelNameChanged)
|
|
|
|
Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)
|
2023-05-02 15:19:17 +00:00
|
|
|
Q_PROPERTY(QString generatedName READ generatedName NOTIFY generatedNameChanged)
|
2023-05-01 13:10:05 +00:00
|
|
|
|
|
|
|
public:
|
2023-05-08 21:23:02 +00:00
|
|
|
enum ModelType {
|
|
|
|
MPT_,
|
|
|
|
GPTJ_,
|
|
|
|
LLAMA_
|
|
|
|
};
|
|
|
|
|
2023-05-04 19:31:41 +00:00
|
|
|
ChatLLM(Chat *parent);
|
2023-05-12 18:06:03 +00:00
|
|
|
virtual ~ChatLLM();
|
2023-05-01 13:10:05 +00:00
|
|
|
|
|
|
|
bool isModelLoaded() const;
|
|
|
|
void regenerateResponse();
|
|
|
|
void resetResponse();
|
|
|
|
void resetContext();
|
|
|
|
|
|
|
|
void stopGenerating() { m_stopGenerating = true; }
|
|
|
|
|
|
|
|
QString response() const;
|
|
|
|
QString modelName() const;
|
|
|
|
|
|
|
|
void setModelName(const QString &modelName);
|
|
|
|
|
|
|
|
bool isRecalc() const { return m_isRecalc; }
|
|
|
|
|
2023-05-02 15:19:17 +00:00
|
|
|
QString generatedName() const { return QString::fromStdString(m_nameResponse); }
|
|
|
|
|
2023-05-08 09:52:57 +00:00
|
|
|
bool serialize(QDataStream &stream, int version);
|
|
|
|
bool deserialize(QDataStream &stream, int version);
|
2023-05-04 19:31:41 +00:00
|
|
|
|
2023-05-01 13:10:05 +00:00
|
|
|
public Q_SLOTS:
|
2023-05-04 19:31:41 +00:00
|
|
|
bool prompt(const QString &prompt, const QString &prompt_template, int32_t n_predict,
|
|
|
|
int32_t top_k, float top_p, float temp, int32_t n_batch, float repeat_penalty, int32_t repeat_penalty_tokens,
|
|
|
|
int32_t n_threads);
|
|
|
|
bool loadDefaultModel();
|
|
|
|
bool loadModel(const QString &modelName);
|
2023-05-01 13:10:05 +00:00
|
|
|
void modelNameChangeRequested(const QString &modelName);
|
2023-05-04 19:31:41 +00:00
|
|
|
void unloadModel();
|
|
|
|
void reloadModel(const QString &modelName);
|
2023-05-02 15:19:17 +00:00
|
|
|
void generateName();
|
2023-05-04 19:31:41 +00:00
|
|
|
void handleChatIdChanged();
|
2023-05-01 13:10:05 +00:00
|
|
|
|
|
|
|
Q_SIGNALS:
|
|
|
|
void isModelLoadedChanged();
|
2023-05-09 00:51:03 +00:00
|
|
|
void modelLoadingError(const QString &error);
|
2023-05-01 13:10:05 +00:00
|
|
|
void responseChanged();
|
|
|
|
void responseStarted();
|
|
|
|
void responseStopped();
|
|
|
|
void modelNameChanged();
|
|
|
|
void recalcChanged();
|
|
|
|
void sendStartup();
|
|
|
|
void sendModelLoaded();
|
|
|
|
void sendResetContext();
|
2023-05-02 15:19:17 +00:00
|
|
|
void generatedNameChanged();
|
2023-05-04 19:31:41 +00:00
|
|
|
void stateChanged();
|
2023-05-01 13:10:05 +00:00
|
|
|
|
|
|
|
private:
|
|
|
|
void resetContextPrivate();
|
|
|
|
bool handlePrompt(int32_t token);
|
|
|
|
bool handleResponse(int32_t token, const std::string &response);
|
|
|
|
bool handleRecalculate(bool isRecalc);
|
2023-05-02 15:19:17 +00:00
|
|
|
bool handleNamePrompt(int32_t token);
|
|
|
|
bool handleNameResponse(int32_t token, const std::string &response);
|
|
|
|
bool handleNameRecalculate(bool isRecalc);
|
2023-05-04 19:31:41 +00:00
|
|
|
void saveState();
|
|
|
|
void restoreState();
|
2023-05-01 13:10:05 +00:00
|
|
|
|
|
|
|
private:
|
|
|
|
LLModel::PromptContext m_ctx;
|
|
|
|
LLModel *m_llmodel;
|
|
|
|
std::string m_response;
|
2023-05-02 15:19:17 +00:00
|
|
|
std::string m_nameResponse;
|
2023-05-01 13:10:05 +00:00
|
|
|
quint32 m_promptResponseTokens;
|
|
|
|
quint32 m_responseLogits;
|
|
|
|
QString m_modelName;
|
2023-05-08 21:23:02 +00:00
|
|
|
ModelType m_modelType;
|
2023-05-04 19:31:41 +00:00
|
|
|
Chat *m_chat;
|
|
|
|
QByteArray m_state;
|
2023-05-01 13:10:05 +00:00
|
|
|
QThread m_llmThread;
|
|
|
|
std::atomic<bool> m_stopGenerating;
|
|
|
|
bool m_isRecalc;
|
|
|
|
};

#endif // CHATLLM_H