From f1bbe97a5c996221c28697f7bea29e382e8e1782 Mon Sep 17 00:00:00 2001
From: Adam Treat
Date: Mon, 10 Apr 2023 23:34:34 -0400
Subject: [PATCH] Big updates to the UI.

---
 CMakeLists.txt                 |  18 +-
 cmake/deploy-qt-linux.cmake.in |   6 +-
 cmake/deploy-qt-mac.cmake.in   |   4 +-
 icons/logo.svg                 |  17 ++
 llm.cpp                        |  22 +++
 llm.h                          |   2 +
 main.qml                       | 294 ++++++++++++++++++++++++---------
 7 files changed, 273 insertions(+), 90 deletions(-)
 create mode 100644 icons/logo.svg

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 78d38cd7..5b8d24b1 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -21,7 +21,11 @@ qt_add_qml_module(chat
     URI gpt4all-chat
     VERSION 1.0
     QML_FILES main.qml
-    RESOURCES icons/send_message.svg icons/stop_generating.svg icons/regenerate.svg
+    RESOURCES
+        icons/send_message.svg
+        icons/stop_generating.svg
+        icons/regenerate.svg
+        icons/logo.svg
 )
 
 set_target_properties(chat PROPERTIES
@@ -46,7 +50,7 @@ install(TARGETS chat DESTINATION bin COMPONENT ${COMPONENT_NAME_MAIN})
 set(CPACK_GENERATOR "IFW")
 
 if(${CMAKE_SYSTEM_NAME} MATCHES Linux)
-    find_program(LINUXDEPLOYQT linuxdeployqt HINTS ${_qt_bin_dir})
+    set(LINUXDEPLOYQT "/home/atreat/dev/linuxdeployqt/build/tools/linuxdeployqt/linuxdeployqt")
     configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/deploy-qt-linux.cmake.in"
         "${CMAKE_BINARY_DIR}/cmake/deploy-qt-linux.cmake" @ONLY)
     set(CPACK_PRE_BUILD_SCRIPTS ${CMAKE_BINARY_DIR}/cmake/deploy-qt-linux.cmake)
@@ -81,6 +85,10 @@ cpack_add_component(${COMPONENT_NAME_MAIN} DOWNLOADED)
 cpack_ifw_configure_component(${COMPONENT_NAME_MAIN} ESSENTIAL FORCED_INSTALLATION)
 cpack_ifw_configure_component(${COMPONENT_NAME_MAIN} LICENSES "GPL-3.0 LICENSE" ${CPACK_RESOURCE_FILE_LICENSE})
 
-if (WIN32)
-cpack_ifw_add_repository("GPT4AllRepository" URL "/path/to/repo/gpt4all-chat/install/windows/")
-endif (WIN32)
+if(${CMAKE_SYSTEM_NAME} MATCHES Linux)
+cpack_ifw_add_repository("GPT4AllRepository" URL "http://localhost/linux/repository")
+elseif(${CMAKE_SYSTEM_NAME} MATCHES Windows)
+cpack_ifw_add_repository("GPT4AllRepository" URL "http://localhost/windows/repository")
+elseif(${CMAKE_SYSTEM_NAME} MATCHES Darwin)
+cpack_ifw_add_repository("GPT4AllRepository" URL "http://localhost/mac/repository")
+endif()
diff --git a/cmake/deploy-qt-linux.cmake.in b/cmake/deploy-qt-linux.cmake.in
index 14924e2e..360adf78 100644
--- a/cmake/deploy-qt-linux.cmake.in
+++ b/cmake/deploy-qt-linux.cmake.in
@@ -1,6 +1,6 @@
-set(LINDEPLOYQT "@WINDEPLOYQT@")
+set(LINUXDEPLOYQT "@LINUXDEPLOYQT@")
 set(COMPONENT_NAME_MAIN "@COMPONENT_NAME_MAIN@")
 set(CMAKE_CURRENT_SOURCE_DIR "@CMAKE_CURRENT_SOURCE_DIR@")
-execute_process(COMMAND ${WINDEPLOYQT} --qmldir ${CMAKE_CURRENT_SOURCE_DIR} ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin)
-file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/models/ggml-model-q4_0.bin
+execute_process(COMMAND ${LINUXDEPLOYQT} ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin/chat -verbose=2)
+file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/CMakeLists.txt
     DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin)
diff --git a/cmake/deploy-qt-mac.cmake.in b/cmake/deploy-qt-mac.cmake.in
index 512b9646..dc5beaa9 100644
--- a/cmake/deploy-qt-mac.cmake.in
+++ b/cmake/deploy-qt-mac.cmake.in
@@ -1,6 +1,6 @@
-set(WINDEPLOYQT "@WINDEPLOYQT@")
+set(WINDEPLOYQT "@MACDEPLOYQT@")
 set(COMPONENT_NAME_MAIN "@COMPONENT_NAME_MAIN@")
 set(CMAKE_CURRENT_SOURCE_DIR "@CMAKE_CURRENT_SOURCE_DIR@")
-execute_process(COMMAND ${WINDEPLOYQT} --qmldir ${CMAKE_CURRENT_SOURCE_DIR} ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin)
+execute_process(COMMAND ${MACDEPLOYQT} --qmldir ${CMAKE_CURRENT_SOURCE_DIR} ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin)
 file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/models/ggml-model-q4_0.bin
     DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin)
diff --git a/icons/logo.svg b/icons/logo.svg
new file mode 100644
index 00000000..7d927b8a
--- /dev/null
+++ b/icons/logo.svg
@@ -0,0 +1,17 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/llm.cpp b/llm.cpp
index c8c92b7d..728ea96d 100644
--- a/llm.cpp
+++ b/llm.cpp
@@ -3,6 +3,7 @@
 #include
 #include
 #include
+#include
 #include
 #include
 
@@ -143,3 +144,24 @@ void LLM::responseStopped()
     m_responseInProgress = false;
     emit responseInProgressChanged();
 }
+
+bool LLM::checkForUpdates() const
+{
+#if defined(Q_OS_LINUX)
+    QString tool("MaintenanceTool");
+#elif defined(Q_OS_WINDOWS)
+    QString tool("MaintenanceTool.exe");
+#elif defined(Q_OS_DARWIN)
+    QString tool("MaintenanceTool.app/Contents/MacOS/MaintenanceTool");
+#endif
+
+    QString fileName = QCoreApplication::applicationDirPath()
+        + QDir::separator() + ".." + QDir::separator() + tool;
+    if (!QFileInfo::exists(fileName)) {
+        qDebug() << "Couldn't find tool at" << fileName << "so cannot check for updates!";
+        return false;
+    }
+
+    return QProcess::startDetached(fileName);
+}
+
diff --git a/llm.h b/llm.h
index 05013d94..35eb11a0 100644
--- a/llm.h
+++ b/llm.h
@@ -61,6 +61,8 @@ public:
     QString response() const;
     bool responseInProgress() const { return m_responseInProgress; }
 
+    Q_INVOKABLE bool checkForUpdates() const;
+
 Q_SIGNALS:
     void isModelLoadedChanged();
     void responseChanged();
diff --git a/main.qml b/main.qml
index c1796430..ff10c9b9 100644
--- a/main.qml
+++ b/main.qml
@@ -9,76 +9,221 @@ Window {
     height: 720
     visible: true
     title: qsTr("GPT4All Chat")
+    color: "#d1d5db"
 
-//    Rectangle {
-//        id: conversationList
-//        width: 300
-//        anchors.left: parent.left
-//        anchors.top: parent.top
-//        anchors.bottom: parent.bottom
-//        color: "#202123"
+    TextField {
+        id: header
+        anchors.left: parent.left
+        anchors.right: parent.right
+        anchors.top: parent.top
+        height: 100
+        color: "#d1d5db"
+        padding: 20
+        font.pixelSize: 24
+        text: "GPT4ALL Model: gpt4all-j"
+        background: Rectangle {
+            color: "#202123"
+        }
+        focus: false
+        horizontalAlignment: TextInput.AlignHCenter
+    }
 
-//        Button {
-//            id: newChat
-//            text: qsTr("New chat")
-//            anchors.top: parent.top
-//            anchors.left: parent.left
-//            anchors.right: parent.right
-//            anchors.margins: 15
-//            padding: 15
-//            background: Rectangle {
-//                opacity: .5
-//                border.color: "#7d7d8e"
-//                border.width: 1
-//                radius: 10
-//                color: "#343541"
-//            }
-//        }
-//    }
+    Image {
+        anchors.verticalCenter: header.baseline
+        x: parent.width / 2 + 163
+        width: 50
+        height: 65
+        source: "qrc:/gpt4all-chat/icons/logo.svg"
+        z: 300
+    }
+
+    Button {
+        id: drawerButton
+        anchors.left: parent.left
+        anchors.top: parent.top
+        anchors.topMargin: 30
+        anchors.leftMargin: 30
+        width: 60
+        height: 40
+        z: 200
+        padding: 15
+
+        background: Item {
+            anchors.fill: parent
+
+            Rectangle {
+                id: bar1
+                color: "#7d7d8e"
+                width: parent.width
+                height: 8
+                radius: 2
+                antialiasing: true
+            }
+
+            Rectangle {
+                id: bar2
+                anchors.centerIn: parent
+                color: "#7d7d8e"
+                width: parent.width
+                height: 8
+                radius: 2
+                antialiasing: true
+            }
+
+            Rectangle {
+                id: bar3
+                anchors.bottom: parent.bottom
+                color: "#7d7d8e"
+                width: parent.width
+                height: 8
+                radius: 2
+                antialiasing: true
+            }
+
+
+        }
+        onClicked: {
+            drawer.visible = !drawer.visible
+        }
+    }
+
+    Button {
+        id: resetContextButton
+        anchors.right: parent.right
+        anchors.top: parent.top
+        anchors.topMargin: 30
+        anchors.rightMargin: 30
+        width: 60
+        height: 40
+        z: 200
+        padding: 15
+
+        background: Item {
+            anchors.fill: parent
+            Image {
+                anchors.centerIn: parent
+                width: 40
+                height: 40
+                source: "qrc:/gpt4all-chat/icons/regenerate.svg"
+            }
+        }
+
+        onClicked: {
+            LLM.stopGenerating()
+            LLM.resetContext()
+            chatModel.clear()
+        }
+    }
+
+    Dialog {
+        id: checkForUpdatesError
+        anchors.centerIn: parent
+        modal: false
+        opacity: 0.9
+        Text {
+            horizontalAlignment: Text.AlignJustify
+            text: qsTr("ERROR: Update system could not find the MaintenanceTool used
+                   to check for updates!
+
+                   Did you install this application using the online installer? If so,
+                   the MaintenanceTool executable should be located one directory
+                   above where this application resides on your filesystem.
+
+                   If you can't start it manually, then I'm afraid you'll have to
+                   reinstall.")
+            color: "#d1d5db"
+        }
+        background: Rectangle {
+            anchors.fill: parent
+            color: "#202123"
+            border.width: 1
+            border.color: "white"
+            radius: 10
+        }
+    }
+
+    Drawer {
+        id: drawer
+        y: header.height
+        width: 0.3 * window.width
+        height: window.height - y
+        modal: false
+        opacity: 0.9
+
+        background: Rectangle {
+            height: parent.height
+            color: "#202123"
+        }
+
+        Item {
+            anchors.fill: parent
+            anchors.margins: 30
+
+            Label {
+                id: conversationList
+                anchors.left: parent.left
+                anchors.right: parent.right
+                anchors.top: parent.top
+                wrapMode: Text.WordWrap
+                text: qsTr("Chat lists of specific conversations coming soon! Check back often for new features :)")
+            }
+
+            Label {
+                id: discordLink
+                textFormat: Text.RichText
+                anchors.left: parent.left
+                anchors.right: parent.right
+                anchors.top: conversationList.bottom
+                anchors.topMargin: 20
+                wrapMode: Text.WordWrap
+                text: qsTr("Check out our discord channel https://discord.gg/4M2QFmTt2k")
+                onLinkActivated: { Qt.openUrlExternally("https://discord.gg/4M2QFmTt2k") }
+            }
+
+            Label {
+                id: nomicProps
+                textFormat: Text.RichText
+                anchors.left: parent.left
+                anchors.right: parent.right
+                anchors.top: discordLink.bottom
+                anchors.topMargin: 20
+                wrapMode: Text.WordWrap
+                text: qsTr("Thanks to nomic.ai and the community for contributing such great work!")
+                onLinkActivated: { Qt.openUrlExternally("https://home.nomic.ai") }
+            }
+
+            Button {
+                anchors.left: parent.left
+                anchors.right: parent.right
+                anchors.bottom: parent.bottom
+                padding: 15
+                contentItem: Text {
+                    text: qsTr("Check for updates...")
+                    horizontalAlignment: Text.AlignHCenter
+                    color: "#d1d5db"
+                }
+
+                background: Rectangle {
+                    opacity: .5
+                    border.color: "#7d7d8e"
+                    border.width: 1
+                    radius: 10
+                    color: "#343541"
+                }
+
+                onClicked: {
+                    if (!LLM.checkForUpdates())
+                        checkForUpdatesError.open()
+                }
+            }
+        }
+    }
 
     Rectangle {
         id: conversation
         color: "#343541"
-//        anchors.left: conversationList.right
         anchors.left: parent.left
         anchors.right: parent.right
         anchors.bottom: parent.bottom
-        anchors.top: parent.top
-
-        Button {
-            id: resetContextButton
-            anchors.right: parent.right
-            anchors.top: parent.top
-            anchors.topMargin: 30
-            anchors.rightMargin: 30
-            width: 60
-            height: 60
-            z: 200
-            padding: 15
-
-            background: Item {
-                anchors.fill: parent
-                Rectangle {
-                    anchors.fill: parent
-                    color: "#343541"
-                    border.color: "#7d7d8e"
-                    border.width: 1
-                    opacity: 0.5
-                    radius: 10
-                }
-                Image {
-                    anchors.fill: parent
-                    anchors.margins: 15
-                    source: "qrc:/gpt4all-chat/icons/regenerate.svg"
-                }
-            }
-
-            onClicked: {
-                LLM.stopGenerating()
-                LLM.resetContext()
-                chatModel.clear()
-            }
-        }
+        anchors.top: header.bottom
 
         ScrollView {
             id: scrollView
@@ -100,20 +245,6 @@ Window {
             ListView {
                 id: listView
                 anchors.fill: parent
-                header: TextField {
-                    id: modelName
-                    width: parent.width
-                    color: "#d1d5db"
-                    padding: 20
-                    font.pixelSize: 24
-                    text: "Model: GPT4ALL-J-6B-4bit"
-                    background: Rectangle {
-                        color: "#444654"
-                    }
-                    focus: false
-                    horizontalAlignment: TextInput.AlignHCenter
-                }
-
                 model: chatModel
                 delegate: TextArea {
                     text: currentResponse ? LLM.response : value
@@ -190,7 +321,6 @@ Window {
                 source: LLM.responseInProgress ? "qrc:/gpt4all-chat/icons/stop_generating.svg" : "qrc:/gpt4all-chat/icons/regenerate.svg"
             }
             leftPadding: 50
-            text: LLM.responseInProgress ? qsTr("Stop generating") : qsTr("Regenerate response")
qsTr("Stop generating") : qsTr("Regenerate response") onClicked: { if (LLM.responseInProgress) LLM.stopGenerating() @@ -210,6 +340,10 @@ Window { anchors.horizontalCenter: textInput.horizontalCenter anchors.bottomMargin: 40 padding: 15 + contentItem: Text { + text: LLM.responseInProgress ? qsTr("Stop generating") : qsTr("Regenerate response") + color: "#d1d5db" + } background: Rectangle { opacity: .5 border.color: "#7d7d8e" @@ -247,10 +381,10 @@ Window { chatModel.append({"name": qsTr("Prompt: "), "currentResponse": false, "value": textInput.text}) chatModel.append({"name": qsTr("Response: "), "currentResponse": true, "value": "", "prompt": prompt}) -// var contextPrompt; -// for (var i = 0; i < chatModel.count; ++i) -// contextPrompt += chatModel.get(i).value + "\n"; -// prompt = contextPrompt + textInput.text + "\n" + // var contextPrompt; + // for (var i = 0; i < chatModel.count; ++i) + // contextPrompt += chatModel.get(i).value + "\n"; + // prompt = contextPrompt + textInput.text + "\n" LLM.resetResponse() LLM.prompt(prompt)