From 474c5387f97325fc7c44a515302a63fb4e9a5487 Mon Sep 17 00:00:00 2001
From: Adam Treat
Date: Thu, 25 May 2023 15:22:45 -0400
Subject: [PATCH] Get the backend as well as the client building/working with
 msvc.

---
 gpt4all-backend/CMakeLists.txt |  1 +
 gpt4all-chat/CMakeLists.txt    |  3 +++
 gpt4all-chat/download.cpp      | 14 +++++++-------
 gpt4all-chat/download.h        |  4 ++--
 gpt4all-chat/test_hw.cpp       |  2 ++
 5 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/gpt4all-backend/CMakeLists.txt b/gpt4all-backend/CMakeLists.txt
index 3756884f..0c06b60e 100644
--- a/gpt4all-backend/CMakeLists.txt
+++ b/gpt4all-backend/CMakeLists.txt
@@ -1,4 +1,5 @@
 cmake_minimum_required(VERSION 3.16)
+set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON)
 
 if(APPLE)
   option(BUILD_UNIVERSAL "Build a Universal binary on macOS" ON)
diff --git a/gpt4all-chat/CMakeLists.txt b/gpt4all-chat/CMakeLists.txt
index 75952319..1e70988e 100644
--- a/gpt4all-chat/CMakeLists.txt
+++ b/gpt4all-chat/CMakeLists.txt
@@ -1,5 +1,8 @@
 cmake_minimum_required(VERSION 3.16)
 
+set(CMAKE_CXX_STANDARD 20)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
 if(APPLE)
   option(BUILD_UNIVERSAL "Build a Universal binary on macOS" OFF)
   if(BUILD_UNIVERSAL)
diff --git a/gpt4all-chat/download.cpp b/gpt4all-chat/download.cpp
index 0f3da1cf..d82b674b 100644
--- a/gpt4all-chat/download.cpp
+++ b/gpt4all-chat/download.cpp
@@ -370,7 +370,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
 
         QString modelFilename = obj["filename"].toString();
         QString modelFilesize = obj["filesize"].toString();
-        QString requires = obj["requires"].toString();
+        QString requiresVersion = obj["requires"].toString();
         QByteArray modelMd5sum = obj["md5sum"].toString().toLatin1().constData();
         bool isDefault = obj.contains("isDefault") && obj["isDefault"] == QString("true");
         bool bestGPTJ = obj.contains("bestGPTJ") && obj["bestGPTJ"] == QString("true");
@@ -378,9 +378,9 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         bool bestMPT = obj.contains("bestMPT") && obj["bestMPT"] == QString("true");
         QString description = obj["description"].toString();
 
-        if (!requires.isEmpty()
-            && requires != currentVersion
-            && compareVersions(requires, currentVersion)) {
+        if (!requiresVersion.isEmpty()
+            && requiresVersion != currentVersion
+            && compareVersions(requiresVersion, currentVersion)) {
             continue;
         }
 
@@ -409,7 +409,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         modelInfo.bestLlama = bestLlama;
         modelInfo.bestMPT = bestMPT;
         modelInfo.description = description;
-        modelInfo.requires = requires;
+        modelInfo.requiresVersion = requiresVersion;
         m_modelMap.insert(modelInfo.filename, modelInfo);
     }
 
@@ -423,7 +423,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         modelInfo.isChatGPT = true;
         modelInfo.filename = "chatgpt-gpt-3.5-turbo";
         modelInfo.description = tr("OpenAI's ChatGPT model gpt-3.5-turbo. ") + chatGPTDesc;
-        modelInfo.requires = "2.4.2";
+        modelInfo.requiresVersion = "2.4.2";
         QString filePath = downloadLocalModelsPath() + modelInfo.filename + ".txt";
         QFileInfo info(filePath);
         modelInfo.installed = info.exists();
@@ -435,7 +435,7 @@ void Download::parseModelsJsonFile(const QByteArray &jsonData)
         modelInfo.isChatGPT = true;
         modelInfo.filename = "chatgpt-gpt-4";
         modelInfo.description = tr("OpenAI's ChatGPT model gpt-4. ") + chatGPTDesc;
-        modelInfo.requires = "2.4.2";
+        modelInfo.requiresVersion = "2.4.2";
         QString filePath = downloadLocalModelsPath() + modelInfo.filename + ".txt";
         QFileInfo info(filePath);
         modelInfo.installed = info.exists();
diff --git a/gpt4all-chat/download.h b/gpt4all-chat/download.h
index 265b8f3e..3f708a33 100644
--- a/gpt4all-chat/download.h
+++ b/gpt4all-chat/download.h
@@ -22,7 +22,7 @@ struct ModelInfo {
     Q_PROPERTY(bool bestMPT MEMBER bestMPT)
     Q_PROPERTY(bool isChatGPT MEMBER isChatGPT)
    Q_PROPERTY(QString description MEMBER description)
-    Q_PROPERTY(QString requires MEMBER requires)
+    Q_PROPERTY(QString requiresVersion MEMBER requiresVersion)
 
 public:
     QString filename;
@@ -36,7 +36,7 @@ public:
     bool bestMPT = false;
     bool isChatGPT = false;
     QString description;
-    QString requires;
+    QString requiresVersion;
 };
 
 Q_DECLARE_METATYPE(ModelInfo)
diff --git a/gpt4all-chat/test_hw.cpp b/gpt4all-chat/test_hw.cpp
index eef10129..9102181d 100644
--- a/gpt4all-chat/test_hw.cpp
+++ b/gpt4all-chat/test_hw.cpp
@@ -3,6 +3,7 @@
 
 int main(int argc, char *argv[])
 {
+#if __GNUC__
     static bool avx = __builtin_cpu_supports("avx");
     static bool avx2 = __builtin_cpu_supports("avx2");
     static bool fma = __builtin_cpu_supports("fma");
@@ -25,5 +26,6 @@ int main(int argc, char *argv[])
     fflush(stdout);
     fprintf(stderr, "\" version of gpt4all.\n");
     fflush(stderr);
+#endif
     return 0;
 }