Turn the chat list into a model.

pull/520/head
Adam Treat 1 year ago
parent 679b61ee07
commit a48226613c

@ -58,8 +58,9 @@ set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
qt_add_executable(chat qt_add_executable(chat
main.cpp main.cpp
chat.h chat.cpp chatmodel.h chat.h chat.cpp
chatllm.h chatllm.cpp chatllm.h chatllm.cpp
chatmodel.h chatlistmodel.h
download.h download.cpp download.h download.cpp
network.h network.cpp network.h network.cpp
llm.h llm.cpp llm.h llm.cpp

@ -0,0 +1,129 @@
#ifndef CHATLISTMODEL_H
#define CHATLISTMODEL_H

#include <QAbstractListModel>
#include <QDebug>

#include "chat.h"

// List model of all open chats, exposed to QML. Owns the Chat objects
// (they are parented to the model) and tracks which chat is current.
// The connectChat/disconnectChat signals let an outside listener (LLM)
// rewire its connections to follow only the active chat.
class ChatListModel : public QAbstractListModel
{
    Q_OBJECT
    Q_PROPERTY(int count READ count NOTIFY countChanged)
    Q_PROPERTY(Chat *currentChat READ currentChat WRITE setCurrentChat NOTIFY currentChatChanged)

public:
    explicit ChatListModel(QObject *parent = nullptr)
        : QAbstractListModel(parent)
    {
        // Always start with at least one chat so currentChat() is never null
        // after construction.
        if (m_chats.isEmpty())
            addChat();
    }

    enum Roles {
        IdRole = Qt::UserRole + 1,
        NameRole
    };

    int rowCount(const QModelIndex &parent = QModelIndex()) const override
    {
        Q_UNUSED(parent)
        return m_chats.size();
    }

    QVariant data(const QModelIndex &index, int role = Qt::DisplayRole) const override
    {
        if (!index.isValid() || index.row() < 0 || index.row() >= m_chats.size())
            return QVariant();
        const Chat *item = m_chats.at(index.row());
        switch (role) {
        case IdRole:
            return item->id();
        case NameRole:
            return item->name();
        }
        return QVariant();
    }

    QHash<int, QByteArray> roleNames() const override
    {
        QHash<int, QByteArray> roles;
        roles[IdRole] = "id";
        roles[NameRole] = "name";
        return roles;
    }

    // Creates a new chat, appends it to the model, and makes it current.
    // Returns the new chat (owned by this model).
    Q_INVOKABLE Chat* addChat()
    {
        Chat *newChat = new Chat(this);
        beginInsertRows(QModelIndex(), m_chats.size(), m_chats.size());
        m_chats.append(newChat);
        endInsertRows();
        emit countChanged();
        setCurrentChat(newChat);
        return newChat;
    }

    // Removes and deletes the given chat. The model is never left empty:
    // removing the last chat immediately creates a fresh one. The current
    // chat changes only if the removed chat was the current one.
    Q_INVOKABLE void removeChat(Chat* chat)
    {
        if (!m_chats.contains(chat)) {
            qDebug() << "WARNING: Removing chat failed with id" << chat->id();
            return;
        }

        const bool chatIsCurrent = chat == m_currentChat;
        emit disconnectChat(chat);
        // FIX: clear the pointer before deleting so setCurrentChat() below
        // does not read/disconnect a dangling current chat.
        if (chatIsCurrent)
            m_currentChat = nullptr;

        const int index = m_chats.indexOf(chat);
        beginRemoveRows(QModelIndex(), index, index);
        m_chats.removeAll(chat);
        endRemoveRows();
        delete chat;

        if (m_chats.isEmpty())
            addChat();
        else if (chatIsCurrent) // FIX: was unconditional, clobbering the current chat
            setCurrentChat(m_chats.first());
    }

    Chat *currentChat() const
    {
        return m_currentChat;
    }

    // Makes an existing chat the current one, emitting disconnectChat for the
    // previous chat and connectChat for the new one so listeners can rewire.
    void setCurrentChat(Chat *chat)
    {
        if (!m_chats.contains(chat)) {
            qDebug() << "ERROR: Setting current chat failed with id" << chat->id();
            return;
        }
        if (m_currentChat) {
            // FIX: was `emit disconnect(m_currentChat)`, which resolved to
            // QObject::disconnect(receiver) — severing all of this model's
            // connections to the chat — instead of emitting the signal that
            // pairs with connectChat (cf. removeChat above).
            emit disconnectChat(m_currentChat);
        }
        emit connectChat(chat);
        m_currentChat = chat;
        emit currentChatChanged();
    }

    // Returns the chat at the given row, or nullptr if out of range.
    Q_INVOKABLE Chat* get(int index)
    {
        if (index < 0 || index >= m_chats.size()) return nullptr;
        return m_chats.at(index);
    }

    int count() const { return m_chats.size(); }

Q_SIGNALS:
    void countChanged();
    void connectChat(Chat *);
    void disconnectChat(Chat *);
    void currentChatChanged();

private:
    Chat *m_currentChat = nullptr; // FIX: was uninitialized; setCurrentChat()
                                   // read it before first assignment (UB)
    QList<Chat*> m_chats;
};

#endif // CHATLISTMODEL_H

@ -16,7 +16,6 @@ class ChatLLM : public QObject
Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged) Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)
public: public:
ChatLLM(); ChatLLM();
bool isModelLoaded() const; bool isModelLoaded() const;

@ -1,8 +1,6 @@
#include "llm.h" #include "llm.h"
#include "download.h" #include "download.h"
#include "network.h" #include "network.h"
#include "llmodel/gptj.h"
#include "llmodel/llamamodel.h"
#include <QCoreApplication> #include <QCoreApplication>
#include <QDir> #include <QDir>
@ -21,16 +19,20 @@ LLM *LLM::globalInstance()
LLM::LLM() LLM::LLM()
: QObject{nullptr} : QObject{nullptr}
, m_chatListModel(new ChatListModel(this))
{ {
if (m_chats.isEmpty())
addChat();
connect(Download::globalInstance(), &Download::modelListChanged, connect(Download::globalInstance(), &Download::modelListChanged,
this, &LLM::modelListChanged, Qt::QueuedConnection); this, &LLM::modelListChanged, Qt::QueuedConnection);
connect(m_chatListModel, &ChatListModel::connectChat,
this, &LLM::connectChat, Qt::QueuedConnection);
connect(m_chatListModel, &ChatListModel::disconnectChat,
this, &LLM::disconnectChat, Qt::QueuedConnection);
} }
QList<QString> LLM::modelList() const QList<QString> LLM::modelList() const
{ {
Q_ASSERT(currentChat()); Q_ASSERT(m_chatListModel->currentChat());
const Chat *currentChat = m_chatListModel->currentChat();
// Build a model list from exepath and from the localpath // Build a model list from exepath and from the localpath
QList<QString> list; QList<QString> list;
@ -46,7 +48,7 @@ QList<QString> LLM::modelList() const
QFileInfo info(filePath); QFileInfo info(filePath);
QString name = info.completeBaseName().remove(0, 5); QString name = info.completeBaseName().remove(0, 5);
if (info.exists()) { if (info.exists()) {
if (name == currentChat()->modelName()) if (name == currentChat->modelName())
list.prepend(name); list.prepend(name);
else else
list.append(name); list.append(name);
@ -63,7 +65,7 @@ QList<QString> LLM::modelList() const
QFileInfo info(filePath); QFileInfo info(filePath);
QString name = info.completeBaseName().remove(0, 5); QString name = info.completeBaseName().remove(0, 5);
if (info.exists() && !list.contains(name)) { // don't allow duplicates if (info.exists() && !list.contains(name)) { // don't allow duplicates
if (name == currentChat()->modelName()) if (name == currentChat->modelName())
list.prepend(name); list.prepend(name);
else else
list.append(name); list.append(name);
@ -109,75 +111,8 @@ bool LLM::checkForUpdates() const
bool LLM::isRecalc() const bool LLM::isRecalc() const
{ {
Q_ASSERT(currentChat()); Q_ASSERT(m_chatListModel->currentChat());
return currentChat()->isRecalc(); return m_chatListModel->currentChat()->isRecalc();
}
Chat *LLM::currentChat() const
{
return chatFromId(m_currentChat);
}
QList<QString> LLM::chatList() const
{
return m_chats.keys();
}
QString LLM::addChat()
{
Chat *newChat = new Chat(this);
m_chats.insert(newChat->id(), newChat);
emit chatListChanged();
setCurrentChatFromId(newChat->id());
return newChat->id();
}
void LLM::removeChat(const QString &id)
{
if (!m_chats.contains(id)) {
qDebug() << "WARNING: Removing chat with id" << id;
return;
}
const bool chatIsCurrent = id == m_currentChat;
Chat *chat = m_chats.value(id);
disconnectChat(chat);
m_chats.remove(id);
emit chatListChanged();
delete chat;
if (m_chats.isEmpty())
addChat();
else
setCurrentChatFromId(chatList().first());
}
Chat *LLM::chatFromId(const QString &id) const
{
if (!m_chats.contains(id)) {
qDebug() << "WARNING: Getting chat from id" << id;
return nullptr;
}
return m_chats.value(id);
}
void LLM::setCurrentChatFromId(const QString &id)
{
if (!m_chats.contains(id)) {
qDebug() << "ERROR: Setting current chat from id" << id;
return;
}
// On load this can be empty as we add a new chat in ctor this method will be called
if (!m_currentChat.isEmpty()) {
Chat *curr = currentChat();
Q_ASSERT(curr);
disconnect(curr);
}
Chat *newCurr = m_chats.value(id);
connectChat(newCurr);
m_currentChat = id;
emit currentChatChanged();
} }
void LLM::connectChat(Chat *chat) void LLM::connectChat(Chat *chat)

20
llm.h

@ -4,44 +4,36 @@
#include <QObject> #include <QObject>
#include "chat.h" #include "chat.h"
#include "chatlistmodel.h"
class LLM : public QObject class LLM : public QObject
{ {
Q_OBJECT Q_OBJECT
Q_PROPERTY(QList<QString> modelList READ modelList NOTIFY modelListChanged) Q_PROPERTY(QList<QString> modelList READ modelList NOTIFY modelListChanged)
Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged) Q_PROPERTY(bool isRecalc READ isRecalc NOTIFY recalcChanged)
Q_PROPERTY(Chat *currentChat READ currentChat NOTIFY currentChatChanged) Q_PROPERTY(ChatListModel *chatListModel READ chatListModel NOTIFY chatListModelChanged)
Q_PROPERTY(QList<QString> chatList READ chatList NOTIFY chatListChanged)
public: public:
static LLM *globalInstance(); static LLM *globalInstance();
QList<QString> modelList() const; QList<QString> modelList() const;
bool isRecalc() const; bool isRecalc() const;
Chat *currentChat() const; ChatListModel *chatListModel() const { return m_chatListModel; }
QList<QString> chatList() const;
Q_INVOKABLE QString addChat();
Q_INVOKABLE void removeChat(const QString &id);
Q_INVOKABLE Chat *chatFromId(const QString &id) const;
Q_INVOKABLE void setCurrentChatFromId(const QString &id);
Q_INVOKABLE bool checkForUpdates() const; Q_INVOKABLE bool checkForUpdates() const;
Q_SIGNALS: Q_SIGNALS:
void modelListChanged(); void modelListChanged();
void currentChatChanged();
void recalcChanged(); void recalcChanged();
void chatListChanged();
void responseChanged(); void responseChanged();
void chatListModelChanged();
private: private Q_SLOTS:
void connectChat(Chat *chat); void connectChat(Chat *chat);
void disconnectChat(Chat *chat); void disconnectChat(Chat *chat);
private: private:
QString m_currentChat; ChatListModel *m_chatListModel;
QMap<QString, Chat*> m_chats;
private: private:
explicit LLM(); explicit LLM();

@ -18,7 +18,8 @@ Window {
id: theme id: theme
} }
property var chatModel: LLM.currentChat.chatModel property var currentChat: LLM.chatListModel.currentChat
property var chatModel: currentChat.chatModel
color: theme.textColor color: theme.textColor
@ -94,13 +95,13 @@ Window {
Item { Item {
anchors.centerIn: parent anchors.centerIn: parent
height: childrenRect.height height: childrenRect.height
visible: LLM.currentChat.isModelLoaded visible: currentChat.isModelLoaded
Label { Label {
id: modelLabel id: modelLabel
color: theme.textColor color: theme.textColor
padding: 20 padding: 20
font.pixelSize: 24 font.pixelSize: theme.fontSizeLarger
text: "" text: ""
background: Rectangle { background: Rectangle {
color: theme.backgroundDarkest color: theme.backgroundDarkest
@ -169,17 +170,17 @@ Window {
} }
onActivated: { onActivated: {
LLM.currentChat.stopGenerating() currentChat.stopGenerating()
LLM.currentChat.modelName = comboBox.currentText currentChat.modelName = comboBox.currentText
LLM.currentChat.reset(); currentChat.reset();
} }
} }
} }
BusyIndicator { BusyIndicator {
anchors.centerIn: parent anchors.centerIn: parent
visible: !LLM.currentChat.isModelLoaded visible: !currentChat.isModelLoaded
running: !LLM.currentChat.isModelLoaded running: !currentChat.isModelLoaded
Accessible.role: Accessible.Animation Accessible.role: Accessible.Animation
Accessible.name: qsTr("Busy indicator") Accessible.name: qsTr("Busy indicator")
Accessible.description: qsTr("Displayed when the model is loading") Accessible.description: qsTr("Displayed when the model is loading")
@ -409,7 +410,7 @@ Window {
var string = item.name; var string = item.name;
var isResponse = item.name === qsTr("Response: ") var isResponse = item.name === qsTr("Response: ")
if (item.currentResponse) if (item.currentResponse)
string += LLM.currentChat.response string += currentChat.response
else else
string += chatModel.get(i).value string += chatModel.get(i).value
if (isResponse && item.stopped) if (isResponse && item.stopped)
@ -427,7 +428,7 @@ Window {
var isResponse = item.name === qsTr("Response: ") var isResponse = item.name === qsTr("Response: ")
str += "{\"content\": "; str += "{\"content\": ";
if (item.currentResponse) if (item.currentResponse)
str += JSON.stringify(LLM.currentChat.response) str += JSON.stringify(currentChat.response)
else else
str += JSON.stringify(item.value) str += JSON.stringify(item.value)
str += ", \"role\": \"" + (isResponse ? "assistant" : "user") + "\""; str += ", \"role\": \"" + (isResponse ? "assistant" : "user") + "\"";
@ -471,7 +472,7 @@ Window {
} }
onClicked: { onClicked: {
LLM.currentChat.reset(); currentChat.reset();
} }
} }
@ -555,14 +556,14 @@ Window {
Accessible.description: qsTr("This is the list of prompt/response pairs comprising the actual conversation with the model") Accessible.description: qsTr("This is the list of prompt/response pairs comprising the actual conversation with the model")
delegate: TextArea { delegate: TextArea {
text: currentResponse ? LLM.currentChat.response : (value ? value : "") text: currentResponse ? currentChat.response : (value ? value : "")
width: listView.width width: listView.width
color: theme.textColor color: theme.textColor
wrapMode: Text.WordWrap wrapMode: Text.WordWrap
focus: false focus: false
readOnly: true readOnly: true
font.pixelSize: theme.fontSizeLarge font.pixelSize: theme.fontSizeLarge
cursorVisible: currentResponse ? (LLM.currentChat.response !== "" ? LLM.currentChat.responseInProgress : false) : false cursorVisible: currentResponse ? (currentChat.response !== "" ? currentChat.responseInProgress : false) : false
cursorPosition: text.length cursorPosition: text.length
background: Rectangle { background: Rectangle {
color: name === qsTr("Response: ") ? theme.backgroundLighter : theme.backgroundLight color: name === qsTr("Response: ") ? theme.backgroundLighter : theme.backgroundLight
@ -582,8 +583,8 @@ Window {
anchors.leftMargin: 90 anchors.leftMargin: 90
anchors.top: parent.top anchors.top: parent.top
anchors.topMargin: 5 anchors.topMargin: 5
visible: (currentResponse ? true : false) && LLM.currentChat.response === "" && LLM.currentChat.responseInProgress visible: (currentResponse ? true : false) && currentChat.response === "" && currentChat.responseInProgress
running: (currentResponse ? true : false) && LLM.currentChat.response === "" && LLM.currentChat.responseInProgress running: (currentResponse ? true : false) && currentChat.response === "" && currentChat.responseInProgress
Accessible.role: Accessible.Animation Accessible.role: Accessible.Animation
Accessible.name: qsTr("Busy indicator") Accessible.name: qsTr("Busy indicator")
@ -614,7 +615,7 @@ Window {
window.height / 2 - height / 2) window.height / 2 - height / 2)
x: globalPoint.x x: globalPoint.x
y: globalPoint.y y: globalPoint.y
property string text: currentResponse ? LLM.currentChat.response : (value ? value : "") property string text: currentResponse ? currentChat.response : (value ? value : "")
response: newResponse === undefined || newResponse === "" ? text : newResponse response: newResponse === undefined || newResponse === "" ? text : newResponse
onAccepted: { onAccepted: {
var responseHasChanged = response !== text && response !== newResponse var responseHasChanged = response !== text && response !== newResponse
@ -624,13 +625,13 @@ Window {
chatModel.updateNewResponse(index, response) chatModel.updateNewResponse(index, response)
chatModel.updateThumbsUpState(index, false) chatModel.updateThumbsUpState(index, false)
chatModel.updateThumbsDownState(index, true) chatModel.updateThumbsDownState(index, true)
Network.sendConversation(LLM.currentChat.id, getConversationJson()); Network.sendConversation(currentChat.id, getConversationJson());
} }
} }
Column { Column {
visible: name === qsTr("Response: ") && visible: name === qsTr("Response: ") &&
(!currentResponse || !LLM.currentChat.responseInProgress) && Network.isActive (!currentResponse || !currentChat.responseInProgress) && Network.isActive
anchors.right: parent.right anchors.right: parent.right
anchors.rightMargin: 20 anchors.rightMargin: 20
anchors.top: parent.top anchors.top: parent.top
@ -656,7 +657,7 @@ Window {
chatModel.updateNewResponse(index, "") chatModel.updateNewResponse(index, "")
chatModel.updateThumbsUpState(index, true) chatModel.updateThumbsUpState(index, true)
chatModel.updateThumbsDownState(index, false) chatModel.updateThumbsDownState(index, false)
Network.sendConversation(LLM.currentChat.id, getConversationJson()); Network.sendConversation(currentChat.id, getConversationJson());
} }
} }
@ -729,27 +730,27 @@ Window {
anchors.verticalCenter: parent.verticalCenter anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left anchors.left: parent.left
anchors.leftMargin: 15 anchors.leftMargin: 15
source: LLM.currentChat.responseInProgress ? "qrc:/gpt4all/icons/stop_generating.svg" : "qrc:/gpt4all/icons/regenerate.svg" source: currentChat.responseInProgress ? "qrc:/gpt4all/icons/stop_generating.svg" : "qrc:/gpt4all/icons/regenerate.svg"
} }
leftPadding: 50 leftPadding: 50
onClicked: { onClicked: {
var index = Math.max(0, chatModel.count - 1); var index = Math.max(0, chatModel.count - 1);
var listElement = chatModel.get(index); var listElement = chatModel.get(index);
if (LLM.currentChat.responseInProgress) { if (currentChat.responseInProgress) {
listElement.stopped = true listElement.stopped = true
LLM.currentChat.stopGenerating() currentChat.stopGenerating()
} else { } else {
LLM.currentChat.regenerateResponse() currentChat.regenerateResponse()
if (chatModel.count) { if (chatModel.count) {
if (listElement.name === qsTr("Response: ")) { if (listElement.name === qsTr("Response: ")) {
chatModel.updateCurrentResponse(index, true); chatModel.updateCurrentResponse(index, true);
chatModel.updateStopped(index, false); chatModel.updateStopped(index, false);
chatModel.updateValue(index, LLM.currentChat.response); chatModel.updateValue(index, currentChat.response);
chatModel.updateThumbsUpState(index, false); chatModel.updateThumbsUpState(index, false);
chatModel.updateThumbsDownState(index, false); chatModel.updateThumbsDownState(index, false);
chatModel.updateNewResponse(index, ""); chatModel.updateNewResponse(index, "");
LLM.currentChat.prompt(listElement.prompt, settingsDialog.promptTemplate, currentChat.prompt(listElement.prompt, settingsDialog.promptTemplate,
settingsDialog.maxLength, settingsDialog.maxLength,
settingsDialog.topK, settingsDialog.topP, settingsDialog.topK, settingsDialog.topP,
settingsDialog.temperature, settingsDialog.temperature,
@ -765,7 +766,7 @@ Window {
anchors.bottomMargin: 40 anchors.bottomMargin: 40
padding: 15 padding: 15
contentItem: Text { contentItem: Text {
text: LLM.currentChat.responseInProgress ? qsTr("Stop generating") : qsTr("Regenerate response") text: currentChat.responseInProgress ? qsTr("Stop generating") : qsTr("Regenerate response")
color: theme.textColor color: theme.textColor
Accessible.role: Accessible.Button Accessible.role: Accessible.Button
Accessible.name: text Accessible.name: text
@ -793,7 +794,7 @@ Window {
color: theme.textColor color: theme.textColor
padding: 20 padding: 20
rightPadding: 40 rightPadding: 40
enabled: LLM.currentChat.isModelLoaded enabled: currentChat.isModelLoaded
wrapMode: Text.WordWrap wrapMode: Text.WordWrap
font.pixelSize: theme.fontSizeLarge font.pixelSize: theme.fontSizeLarge
placeholderText: qsTr("Send a message...") placeholderText: qsTr("Send a message...")
@ -817,16 +818,16 @@ Window {
if (textInput.text === "") if (textInput.text === "")
return return
LLM.currentChat.stopGenerating() currentChat.stopGenerating()
if (chatModel.count) { if (chatModel.count) {
var index = Math.max(0, chatModel.count - 1); var index = Math.max(0, chatModel.count - 1);
var listElement = chatModel.get(index); var listElement = chatModel.get(index);
chatModel.updateCurrentResponse(index, false); chatModel.updateCurrentResponse(index, false);
chatModel.updateValue(index, LLM.currentChat.response); chatModel.updateValue(index, currentChat.response);
} }
LLM.currentChat.newPromptResponsePair(textInput.text); currentChat.newPromptResponsePair(textInput.text);
LLM.currentChat.prompt(textInput.text, settingsDialog.promptTemplate, currentChat.prompt(textInput.text, settingsDialog.promptTemplate,
settingsDialog.maxLength, settingsDialog.maxLength,
settingsDialog.topK, settingsDialog.topK,
settingsDialog.topP, settingsDialog.topP,

@ -84,9 +84,10 @@ bool Network::packageAndSendJson(const QString &ingestId, const QString &json)
} }
Q_ASSERT(doc.isObject()); Q_ASSERT(doc.isObject());
Q_ASSERT(LLM::globalInstance()->chatListModel()->currentChat());
QJsonObject object = doc.object(); QJsonObject object = doc.object();
object.insert("source", "gpt4all-chat"); object.insert("source", "gpt4all-chat");
object.insert("agent_id", LLM::globalInstance()->currentChat()->modelName()); object.insert("agent_id", LLM::globalInstance()->chatListModel()->currentChat()->modelName());
object.insert("submitter_id", m_uniqueId); object.insert("submitter_id", m_uniqueId);
object.insert("ingest_id", ingestId); object.insert("ingest_id", ingestId);
@ -220,6 +221,7 @@ void Network::sendMixpanelEvent(const QString &ev)
if (!m_usageStatsActive) if (!m_usageStatsActive)
return; return;
Q_ASSERT(LLM::globalInstance()->chatListModel()->currentChat());
QJsonObject properties; QJsonObject properties;
properties.insert("token", "ce362e568ddaee16ed243eaffb5860a2"); properties.insert("token", "ce362e568ddaee16ed243eaffb5860a2");
properties.insert("time", QDateTime::currentSecsSinceEpoch()); properties.insert("time", QDateTime::currentSecsSinceEpoch());
@ -230,7 +232,7 @@ void Network::sendMixpanelEvent(const QString &ev)
properties.insert("ip", m_ipify); properties.insert("ip", m_ipify);
properties.insert("name", QCoreApplication::applicationName() + " v" properties.insert("name", QCoreApplication::applicationName() + " v"
+ QCoreApplication::applicationVersion()); + QCoreApplication::applicationVersion());
properties.insert("model", LLM::globalInstance()->currentChat()->modelName()); properties.insert("model", LLM::globalInstance()->chatListModel()->currentChat()->modelName());
QJsonObject event; QJsonObject event;
event.insert("event", ev); event.insert("event", ev);

@ -25,27 +25,68 @@ Drawer {
Item { Item {
anchors.fill: parent anchors.fill: parent
anchors.margins: 30 anchors.margins: 10
Accessible.role: Accessible.Pane Accessible.role: Accessible.Pane
Accessible.name: qsTr("Drawer on the left of the application") Accessible.name: qsTr("Drawer on the left of the application")
Accessible.description: qsTr("Drawer that is revealed by pressing the hamburger button") Accessible.description: qsTr("Drawer that is revealed by pressing the hamburger button")
Label { Button {
id: newChat
anchors.left: parent.left
anchors.right: parent.right
padding: 15
font.pixelSize: theme.fontSizeLarger
background: Rectangle {
color: theme.backgroundDarkest
opacity: .5
border.color: theme.backgroundLightest
border.width: 1
radius: 10
}
contentItem: Text {
text: qsTr("New chat")
horizontalAlignment: Text.AlignHCenter
color: theme.textColor
Accessible.role: Accessible.Button
Accessible.name: text
Accessible.description: qsTr("Use this to create a new chat")
}
onClicked: {
LLM.chatListModel.addChat();
}
}
ListView {
id: conversationList id: conversationList
anchors.left: parent.left anchors.left: parent.left
anchors.right: parent.right anchors.right: parent.right
anchors.top: parent.top anchors.topMargin: 10
wrapMode: Text.WordWrap anchors.top: newChat.bottom
text: qsTr("Chat lists of specific conversations coming soon! Check back often for new features :)") anchors.bottom: checkForUpdatesButton.top
color: theme.textColor model: LLM.chatListModel
delegate: Label {
id: chatLabel
anchors.left: parent.left
anchors.right: parent.right
color: theme.textColor
padding: 15
font.pixelSize: theme.fontSizeLarger
text: name
background: Rectangle {
color: index % 2 === 0 ? theme.backgroundLight : theme.backgroundLighter
}
horizontalAlignment: TextInput.AlignLeft
}
Accessible.role: Accessible.Paragraph Accessible.role: Accessible.List
Accessible.name: qsTr("Coming soon") Accessible.name: qsTr("List of chats")
Accessible.description: text Accessible.description: qsTr("List of chats in the drawer dialog")
} }
Label { /*Label {
id: discordLink id: discordLink
textFormat: Text.RichText textFormat: Text.RichText
anchors.left: parent.left anchors.left: parent.left
@ -78,13 +119,14 @@ Drawer {
Accessible.role: Accessible.Paragraph Accessible.role: Accessible.Paragraph
Accessible.name: qsTr("Thank you blurb") Accessible.name: qsTr("Thank you blurb")
Accessible.description: qsTr("Contains embedded link to https://home.nomic.ai") Accessible.description: qsTr("Contains embedded link to https://home.nomic.ai")
} }*/
Button { Button {
id: checkForUpdatesButton
anchors.left: parent.left anchors.left: parent.left
anchors.right: parent.right anchors.right: parent.right
anchors.bottom: downloadButton.top anchors.bottom: downloadButton.top
anchors.bottomMargin: 20 anchors.bottomMargin: 10
padding: 15 padding: 15
contentItem: Text { contentItem: Text {
text: qsTr("Check for updates...") text: qsTr("Check for updates...")

Loading…
Cancel
Save