Mirror of https://github.com/nomic-ai/gpt4all (synced 2024-11-04 12:00:10 +00:00)
Start moving toward a single authoritative class for all settings.

This is necessary to get rid of technical debt before we drastically increase the complexity of settings by adding per-model settings, mirostat, and other fun things. Right now the settings are divided between QML and C++, with convenience methods for settings sync and related logic scattered across other singletons. This change consolidates all of the settings logic into a single class with a single API for both C++ and QML.
parent 390994ea5e
commit 705b480d72
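For context on "a single API for both C++ and QML": the diff below exposes MySettings as a QObject singleton with Q_PROPERTYs, so QML can bind to the same instance that C++ uses. Below is a minimal sketch of how such a singleton might be registered with the QML engine; the module URI "mysettings" and this main() wiring are assumptions for illustration, not part of this commit.

#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QtQml>

#include "mysettings.h"

int main(int argc, char *argv[])
{
    QGuiApplication app(argc, argv);

    // Hypothetical wiring (not from this commit): QML sees the one
    // authoritative instance as the "MySettings" singleton, while C++
    // callers use MySettings::globalInstance() directly.
    qmlRegisterSingletonInstance("mysettings", 1, 0, "MySettings",
                                 MySettings::globalInstance());

    QQmlApplicationEngine engine;
    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
    return app.exec();
}

With that registration in place, a QML binding like text: MySettings.temperature and a C++ call like MySettings::globalInstance()->temperature() read the same QSettings-backed value.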
@@ -21,7 +21,6 @@ int main(int argc, char *argv[])
     QCoreApplication::setOrganizationDomain("gpt4all.io");
     QCoreApplication::setApplicationName("GPT4All");
     QCoreApplication::setApplicationVersion(APP_VERSION);
-    QSettings::setDefaultFormat(QSettings::IniFormat);

     Logger::globalInstance();
@@ -1,6 +1,52 @@
 #include "mysettings.h"

+#include <QDir>
+#include <QFile>
+#include <QFileInfo>
+#include <QSettings>
+#include <QStandardPaths>
+
+static double default_temperature = 0.7;
+static double default_topP = 0.1;
+static int default_topK = 40;
+static int default_maxLength = 4096;
+static int default_promptBatchSize = 128;
+static double default_repeatPenalty = 1.18;
+static int default_repeatPenaltyTokens = 64;
+static QString default_promptTemplate = "### Human:\n%1\n### Assistant:\n";
+static int default_threadCount = 0;
+static bool default_saveChats = false;
+static bool default_saveChatGPTChats = true;
+static bool default_serverChat = false;
+static QString default_userDefaultModel = "Application default";
+static bool default_forceMetal = false;
+
+static QString defaultLocalModelsPath()
+{
+    QString localPath = QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation)
+        + "/";
+    QString testWritePath = localPath + QString("test_write.txt");
+    QString canonicalLocalPath = QFileInfo(localPath).canonicalFilePath() + "/";
+    QDir localDir(localPath);
+    if (!localDir.exists()) {
+        if (!localDir.mkpath(localPath)) {
+            qWarning() << "ERROR: Local download directory can't be created:" << canonicalLocalPath;
+            return canonicalLocalPath;
+        }
+    }
+
+    if (QFileInfo::exists(testWritePath))
+        return canonicalLocalPath;
+
+    QFile testWriteFile(testWritePath);
+    if (testWriteFile.open(QIODeviceBase::ReadWrite)) {
+        testWriteFile.close();
+        return canonicalLocalPath;
+    }
+
+    qWarning() << "ERROR: Local download path appears not writeable:" << canonicalLocalPath;
+    return canonicalLocalPath;
+}
+
 class MyPrivateSettings: public MySettings { };
 Q_GLOBAL_STATIC(MyPrivateSettings, settingsInstance)
@@ -12,6 +58,282 @@ MySettings *MySettings::globalInstance()
 MySettings::MySettings()
     : QObject{nullptr}
 {
+    QSettings::setDefaultFormat(QSettings::IniFormat);
 }
+
+void MySettings::restoreGenerationDefaults()
+{
+    setTemperature(default_temperature);
+    setTopP(default_topP);
+    setTopK(default_topK);
+    setMaxLength(default_maxLength);
+    setPromptBatchSize(default_promptBatchSize);
+    setRepeatPenalty(default_repeatPenalty);
+    setRepeatPenaltyTokens(default_repeatPenaltyTokens);
+    setPromptTemplate(default_promptTemplate);
+}
+
+void MySettings::restoreApplicationDefaults()
+{
+    setThreadCount(default_threadCount);
+    setSaveChats(default_saveChats);
+    setSaveChatGPTChats(default_saveChatGPTChats);
+    setServerChat(default_serverChat);
+    setModelPath(defaultLocalModelsPath());
+    setUserDefaultModel(default_userDefaultModel);
+    setForceMetal(default_forceMetal);
+}
+
+double MySettings::temperature() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("temperature", default_temperature).toDouble();
+}
+
+void MySettings::setTemperature(double t)
+{
+    if (temperature() == t)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("temperature", t);
+    emit temperatureChanged();
+}
+
+double MySettings::topP() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("topP", default_topP).toDouble();
+}
+
+void MySettings::setTopP(double p)
+{
+    if (topP() == p)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("topP", p);
+    emit topPChanged();
+}
+
+int MySettings::topK() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("topK", default_topK).toInt();
+}
+
+void MySettings::setTopK(int k)
+{
+    if (topK() == k)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("topK", k);
+    emit topKChanged();
+}
+
+int MySettings::maxLength() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("maxLength", default_maxLength).toInt();
+}
+
+void MySettings::setMaxLength(int l)
+{
+    if (maxLength() == l)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("maxLength", l);
+    emit maxLengthChanged();
+}
+
+int MySettings::promptBatchSize() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("promptBatchSize", default_promptBatchSize).toInt();
+}
+
+void MySettings::setPromptBatchSize(int s)
+{
+    if (promptBatchSize() == s)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("promptBatchSize", s);
+    emit promptBatchSizeChanged();
+}
+
+double MySettings::repeatPenalty() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("repeatPenalty", default_repeatPenalty).toDouble();
+}
+
+void MySettings::setRepeatPenalty(double p)
+{
+    if (repeatPenalty() == p)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("repeatPenalty", p);
+    emit repeatPenaltyChanged();
+}
+
+int MySettings::repeatPenaltyTokens() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("repeatPenaltyTokens", default_repeatPenaltyTokens).toInt();
+}
+
+void MySettings::setRepeatPenaltyTokens(int t)
+{
+    if (repeatPenaltyTokens() == t)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("repeatPenaltyTokens", t);
+    emit repeatPenaltyTokensChanged();
+}
+
+QString MySettings::promptTemplate() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("promptTemplate", default_promptTemplate).toString();
+}
+
+void MySettings::setPromptTemplate(const QString &t)
+{
+    if (promptTemplate() == t)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("promptTemplate", t);
+    emit promptTemplateChanged();
+}
+
+int MySettings::threadCount() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("threadCount", default_threadCount).toInt();
+}
+
+void MySettings::setThreadCount(int c)
+{
+    if (threadCount() == c)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("threadCount", c);
+    emit threadCountChanged();
+}
+
+bool MySettings::saveChats() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("saveChats", default_saveChats).toBool();
+}
+
+void MySettings::setSaveChats(bool b)
+{
+    if (saveChats() == b)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("saveChats", b);
+    emit saveChatsChanged();
+}
+
+bool MySettings::saveChatGPTChats() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("saveChatGPTChats", default_saveChatGPTChats).toBool();
+}
+
+void MySettings::setSaveChatGPTChats(bool b)
+{
+    if (saveChatGPTChats() == b)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("saveChatGPTChats", b);
+    emit saveChatGPTChatsChanged();
+}
+
+bool MySettings::serverChat() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("serverChat", default_serverChat).toBool();
+}
+
+void MySettings::setServerChat(bool b)
+{
+    if (serverChat() == b)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("serverChat", b);
+    emit serverChatChanged();
+}
+
+QString MySettings::modelPath() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("modelPath", defaultLocalModelsPath()).toString();
+}
+
+void MySettings::setModelPath(const QString &p)
+{
+    if (modelPath() == p)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("modelPath", p);
+    emit modelPathChanged();
+}
+
+QString MySettings::userDefaultModel() const
+{
+    QSettings setting;
+    setting.sync();
+    return setting.value("userDefaultModel", default_userDefaultModel).toString();
+}
+
+void MySettings::setUserDefaultModel(const QString &u)
+{
+    if (userDefaultModel() == u)
+        return;
+
+    QSettings setting;
+    setting.sync();
+    setting.setValue("userDefaultModel", u);
+    emit userDefaultModelChanged();
+}
@@ -19,10 +341,10 @@ bool MySettings::forceMetal() const
 {
     return m_forceMetal;
 }

-void MySettings::setForceMetal(bool enabled)
+void MySettings::setForceMetal(bool b)
 {
-    if (m_forceMetal == enabled)
+    if (m_forceMetal == b)
         return;
-    m_forceMetal = enabled;
-    emit forceMetalChanged(enabled);
+    m_forceMetal = b;
+    emit forceMetalChanged(b);
 }
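As an aside, every getter/setter pair above follows the same QSettings-backed shape: sync, read-or-default, compare, write, notify. A generic helper could express that shape once; the sketch below is only an illustration of that observation, and the helper names are mine, not from the commit.

#include <QSettings>
#include <QString>
#include <QVariant>

// Illustrative sketch, not code from this commit.
template <typename T>
T settingsValue(const QString &key, const T &defaultValue)
{
    QSettings setting;
    setting.sync(); // mirror the diff: re-read the backing store before use
    return setting.value(key, QVariant::fromValue(defaultValue)).value<T>();
}

template <typename T>
bool setSettingsValue(const QString &key, const T &value, const T &defaultValue)
{
    // Report whether the stored value actually changed, so the caller
    // knows whether to emit the matching NOTIFY signal.
    if (settingsValue<T>(key, defaultValue) == value)
        return false;
    QSettings setting;
    setting.setValue(key, value);
    return true;
}

With such helpers, temperature() would reduce to return settingsValue("temperature", default_temperature); and setTemperature() to a guarded setSettingsValue call followed by emit temperatureChanged();.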
@@ -7,15 +7,78 @@
 class MySettings : public QObject
 {
     Q_OBJECT
+    Q_PROPERTY(double temperature READ temperature WRITE setTemperature NOTIFY temperatureChanged)
+    Q_PROPERTY(double topP READ topP WRITE setTopP NOTIFY topPChanged)
+    Q_PROPERTY(int topK READ topK WRITE setTopK NOTIFY topKChanged)
+    Q_PROPERTY(int maxLength READ maxLength WRITE setMaxLength NOTIFY maxLengthChanged)
+    Q_PROPERTY(int promptBatchSize READ promptBatchSize WRITE setPromptBatchSize NOTIFY promptBatchSizeChanged)
+    Q_PROPERTY(double repeatPenalty READ repeatPenalty WRITE setRepeatPenalty NOTIFY repeatPenaltyChanged)
+    Q_PROPERTY(int repeatPenaltyTokens READ repeatPenaltyTokens WRITE setRepeatPenaltyTokens NOTIFY repeatPenaltyTokensChanged)
+    Q_PROPERTY(QString promptTemplate READ promptTemplate WRITE setPromptTemplate NOTIFY promptTemplateChanged)
+    Q_PROPERTY(int threadCount READ threadCount WRITE setThreadCount NOTIFY threadCountChanged)
+    Q_PROPERTY(bool saveChats READ saveChats WRITE setSaveChats NOTIFY saveChatsChanged)
+    Q_PROPERTY(bool saveChatGPTChats READ saveChatGPTChats WRITE setSaveChatGPTChats NOTIFY saveChatGPTChatsChanged)
+    Q_PROPERTY(bool serverChat READ serverChat WRITE setServerChat NOTIFY serverChatChanged)
+    Q_PROPERTY(QString modelPath READ modelPath WRITE setModelPath NOTIFY modelPathChanged)
+    Q_PROPERTY(QString userDefaultModel READ userDefaultModel WRITE setUserDefaultModel NOTIFY userDefaultModelChanged)
+    Q_PROPERTY(bool forceMetal READ forceMetal WRITE setForceMetal NOTIFY forceMetalChanged)

 public:
     static MySettings *globalInstance();

+    // Restore methods
+    Q_INVOKABLE void restoreGenerationDefaults();
+    Q_INVOKABLE void restoreApplicationDefaults();
+
+    // Generation settings
+    double temperature() const;
+    void setTemperature(double t);
+    double topP() const;
+    void setTopP(double p);
+    int topK() const;
+    void setTopK(int k);
+    int maxLength() const;
+    void setMaxLength(int l);
+    int promptBatchSize() const;
+    void setPromptBatchSize(int s);
+    double repeatPenalty() const;
+    void setRepeatPenalty(double p);
+    int repeatPenaltyTokens() const;
+    void setRepeatPenaltyTokens(int t);
+    QString promptTemplate() const;
+    void setPromptTemplate(const QString &t);
+
+    // Application settings
+    int threadCount() const;
+    void setThreadCount(int c);
+    bool saveChats() const;
+    void setSaveChats(bool b);
+    bool saveChatGPTChats() const;
+    void setSaveChatGPTChats(bool b);
+    bool serverChat() const;
+    void setServerChat(bool b);
+    QString modelPath() const;
+    void setModelPath(const QString &p);
+    QString userDefaultModel() const;
+    void setUserDefaultModel(const QString &u);
     bool forceMetal() const;
-    void setForceMetal(bool enabled);
+    void setForceMetal(bool b);

 Q_SIGNALS:
+    void temperatureChanged();
+    void topPChanged();
+    void topKChanged();
+    void maxLengthChanged();
+    void promptBatchSizeChanged();
+    void repeatPenaltyChanged();
+    void repeatPenaltyTokensChanged();
+    void promptTemplateChanged();
+    void threadCountChanged();
+    void saveChatsChanged();
+    void saveChatGPTChatsChanged();
+    void serverChatChanged();
+    void modelPathChanged();
+    void userDefaultModelChanged();
     void forceMetalChanged(bool);

 private:
@@ -36,98 +36,27 @@ Dialog {
         id: theme
     }

-    property real defaultTemperature: 0.7
-    property real defaultTopP: 0.1
-    property int defaultTopK: 40
-    property int defaultMaxLength: 4096
-    property int defaultPromptBatchSize: 128
-    property real defaultRepeatPenalty: 1.18
-    property int defaultRepeatPenaltyTokens: 64
-    property int defaultThreadCount: 0
-    property bool defaultSaveChats: false
-    property bool defaultSaveChatGPTChats: true
-    property bool defaultServerChat: false
-    property string defaultPromptTemplate: "### Human:
-%1
-### Assistant:\n"
-    property string defaultModelPath: ModelList.defaultLocalModelsPath()
-    property string defaultUserDefaultModel: "Application default"
-
-    property alias temperature: settings.temperature
-    property alias topP: settings.topP
-    property alias topK: settings.topK
-    property alias maxLength: settings.maxLength
-    property alias promptBatchSize: settings.promptBatchSize
-    property alias promptTemplate: settings.promptTemplate
-    property alias repeatPenalty: settings.repeatPenalty
-    property alias repeatPenaltyTokens: settings.repeatPenaltyTokens
-    property alias threadCount: settings.threadCount
-    property alias saveChats: settings.saveChats
-    property alias saveChatGPTChats: settings.saveChatGPTChats
-    property alias serverChat: settings.serverChat
-    property alias modelPath: settings.modelPath
-    property alias userDefaultModel: settings.userDefaultModel
-
-    Settings {
-        id: settings
-        property real temperature: settingsDialog.defaultTemperature
-        property real topP: settingsDialog.defaultTopP
-        property int topK: settingsDialog.defaultTopK
-        property int maxLength: settingsDialog.defaultMaxLength
-        property int promptBatchSize: settingsDialog.defaultPromptBatchSize
-        property int threadCount: settingsDialog.defaultThreadCount
-        property bool saveChats: settingsDialog.defaultSaveChats
-        property bool saveChatGPTChats: settingsDialog.defaultSaveChatGPTChats
-        property bool serverChat: settingsDialog.defaultServerChat
-        property real repeatPenalty: settingsDialog.defaultRepeatPenalty
-        property int repeatPenaltyTokens: settingsDialog.defaultRepeatPenaltyTokens
-        property string promptTemplate: settingsDialog.defaultPromptTemplate
-        property string modelPath: settingsDialog.defaultModelPath
-        property string userDefaultModel: settingsDialog.defaultUserDefaultModel
-    }
-
     function restoreGenerationDefaults() {
-        settings.temperature = defaultTemperature
-        settings.topP = defaultTopP
-        settings.topK = defaultTopK
-        settings.maxLength = defaultMaxLength
-        settings.promptBatchSize = defaultPromptBatchSize
-        settings.promptTemplate = defaultPromptTemplate
-        templateTextArea.text = defaultPromptTemplate
-        settings.repeatPenalty = defaultRepeatPenalty
-        settings.repeatPenaltyTokens = defaultRepeatPenaltyTokens
-        settings.sync()
+        MySettings.restoreGenerationDefaults();
+        templateTextArea.text = MySettings.promptTemplate
     }

     function restoreApplicationDefaults() {
-        settings.modelPath = settingsDialog.defaultModelPath
-        settings.threadCount = defaultThreadCount
-        settings.saveChats = defaultSaveChats
-        settings.saveChatGPTChats = defaultSaveChatGPTChats
-        settings.serverChat = defaultServerChat
-        settings.userDefaultModel = defaultUserDefaultModel
-        ModelList.localModelsPath = settings.modelPath
-        LLM.threadCount = settings.threadCount
-        LLM.serverEnabled = settings.serverChat
-        ChatListModel.shouldSaveChats = settings.saveChats
-        ChatListModel.shouldSaveChatGPTChats = settings.saveChatGPTChats
-        settings.sync()
+        MySettings.restoreApplicationDefaults();
+        ModelList.localModelsPath = MySettings.modelPath
+        LLM.threadCount = MySettings.threadCount
+        LLM.serverEnabled = MySettings.serverChat
+        ChatListModel.shouldSaveChats = MySettings.saveChats
+        ChatListModel.shouldSaveChatGPTChats = MySettings.saveChatGPTChats
+        MySettings.forceMetal = false
     }

     Component.onCompleted: {
-        LLM.threadCount = settings.threadCount
-        LLM.serverEnabled = settings.serverChat
-        ChatListModel.shouldSaveChats = settings.saveChats
-        ChatListModel.shouldSaveChatGPTChats = settings.saveChatGPTChats
-        ModelList.localModelsPath = settings.modelPath
-    }
-
-    Connections {
-        target: settingsDialog
-        function onClosed() {
-            settings.sync()
-        }
+        LLM.threadCount = MySettings.threadCount
+        LLM.serverEnabled = MySettings.serverChat
+        ChatListModel.shouldSaveChats = MySettings.saveChats
+        ChatListModel.shouldSaveChatGPTChats = MySettings.saveChatGPTChats
+        ModelList.localModelsPath = MySettings.modelPath
     }

     Item {
@@ -309,7 +238,7 @@ Dialog {
             Layout.column: 0
         }
         MyTextField {
-            text: settings.temperature.toString()
+            text: MySettings.temperature
             color: theme.textColor
             ToolTip.text: qsTr("Temperature increases the chances of choosing less likely tokens.\nNOTE: Higher temperature gives more creative but less predictable outputs.")
             ToolTip.visible: hovered
@@ -321,11 +250,10 @@ Dialog {
             onEditingFinished: {
                 var val = parseFloat(text)
                 if (!isNaN(val)) {
-                    settings.temperature = val
-                    settings.sync()
+                    MySettings.temperature = val
                     focus = false
                 } else {
-                    text = settings.temperature.toString()
+                    text = MySettings.temperature
                 }
             }
             Accessible.role: Accessible.EditableText
@@ -340,7 +268,7 @@ Dialog {
             Layout.column: 0
         }
         MyTextField {
-            text: settings.topP.toString()
+            text: MySettings.topP
             color: theme.textColor
             ToolTip.text: qsTr("Only the most likely tokens up to a total probability of top_p can be chosen.\nNOTE: Prevents choosing highly unlikely tokens, aka Nucleus Sampling")
             ToolTip.visible: hovered
@@ -352,11 +280,10 @@ Dialog {
             onEditingFinished: {
                 var val = parseFloat(text)
                 if (!isNaN(val)) {
-                    settings.topP = val
-                    settings.sync()
+                    MySettings.topP = val
                     focus = false
                 } else {
-                    text = settings.topP.toString()
+                    text = MySettings.topP
                 }
             }
             Accessible.role: Accessible.EditableText
@@ -371,7 +298,7 @@ Dialog {
             Layout.column: 0
         }
         MyTextField {
-            text: settings.topK.toString()
+            text: MySettings.topK
             color: theme.textColor
             ToolTip.text: qsTr("Only the top K most likely tokens will be chosen from")
             ToolTip.visible: hovered
@@ -383,11 +310,10 @@ Dialog {
             onEditingFinished: {
                 var val = parseInt(text)
                 if (!isNaN(val)) {
-                    settings.topK = val
-                    settings.sync()
+                    MySettings.topK = val
                     focus = false
                 } else {
-                    text = settings.topK.toString()
+                    text = MySettings.topK
                 }
             }
             Accessible.role: Accessible.EditableText
@@ -402,7 +328,7 @@ Dialog {
             Layout.column: 0
         }
         MyTextField {
-            text: settings.maxLength.toString()
+            text: MySettings.maxLength
             color: theme.textColor
             ToolTip.text: qsTr("Maximum length of response in tokens")
             ToolTip.visible: hovered
@@ -414,11 +340,10 @@ Dialog {
             onEditingFinished: {
                 var val = parseInt(text)
                 if (!isNaN(val)) {
-                    settings.maxLength = val
-                    settings.sync()
+                    MySettings.maxLength = val
                     focus = false
                 } else {
-                    text = settings.maxLength.toString()
+                    text = MySettings.maxLength
                 }
             }
             Accessible.role: Accessible.EditableText
@@ -434,7 +359,7 @@ Dialog {
             Layout.column: 0
         }
         MyTextField {
-            text: settings.promptBatchSize.toString()
+            text: MySettings.promptBatchSize
             color: theme.textColor
             ToolTip.text: qsTr("Amount of prompt tokens to process at once.\nNOTE: Higher values can speed up reading prompts but will use more RAM")
             ToolTip.visible: hovered
@@ -446,11 +371,10 @@ Dialog {
             onEditingFinished: {
                 var val = parseInt(text)
                 if (!isNaN(val)) {
-                    settings.promptBatchSize = val
-                    settings.sync()
+                    MySettings.promptBatchSize = val
                     focus = false
                 } else {
-                    text = settings.promptBatchSize.toString()
+                    text = MySettings.promptBatchSize
                 }
             }
             Accessible.role: Accessible.EditableText
@@ -465,7 +389,7 @@ Dialog {
             Layout.column: 0
         }
         MyTextField {
-            text: settings.repeatPenalty.toString()
+            text: MySettings.repeatPenalty
             color: theme.textColor
             ToolTip.text: qsTr("Amount to penalize repetitiveness of the output")
             ToolTip.visible: hovered
@@ -477,11 +401,10 @@ Dialog {
             onEditingFinished: {
                 var val = parseFloat(text)
                 if (!isNaN(val)) {
-                    settings.repeatPenalty = val
-                    settings.sync()
+                    MySettings.repeatPenalty = val
                     focus = false
                 } else {
-                    text = settings.repeatPenalty.toString()
+                    text = MySettings.repeatPenalty
                 }
             }
             Accessible.role: Accessible.EditableText
@@ -496,7 +419,7 @@ Dialog {
             Layout.column: 0
         }
         MyTextField {
-            text: settings.repeatPenaltyTokens.toString()
+            text: MySettings.repeatPenaltyTokens
             color: theme.textColor
             ToolTip.text: qsTr("How far back in output to apply repeat penalty")
             ToolTip.visible: hovered
@@ -508,11 +431,10 @@ Dialog {
             onEditingFinished: {
                 var val = parseInt(text)
                 if (!isNaN(val)) {
-                    settings.repeatPenaltyTokens = val
-                    settings.sync()
+                    MySettings.repeatPenaltyTokens = val
                     focus = false
                 } else {
-                    text = settings.repeatPenaltyTokens.toString()
+                    text = MySettings.repeatPenaltyTokens
                }
             }
             Accessible.role: Accessible.EditableText
@@ -558,7 +480,7 @@ Dialog {
             anchors.fill: parent
             TextArea {
                 id: templateTextArea
-                text: settings.promptTemplate
+                text: MySettings.promptTemplate
                 color: theme.textColor
                 background: Rectangle {
                     implicitWidth: 150
@@ -569,8 +491,7 @@ Dialog {
                 wrapMode: TextArea.Wrap
                 onTextChanged: {
                     if (templateTextArea.text.indexOf("%1") !== -1) {
-                        settings.promptTemplate = text
-                        settings.sync()
+                        MySettings.promptTemplate = text
                     }
                 }
                 bottomPadding: 10
@@ -633,21 +554,19 @@ Dialog {
             Accessible.name: qsTr("ComboBox for displaying/picking the default model")
             Accessible.description: qsTr("Use this for picking the default model to use; the first item is the current default model")
             function updateModel() {
-                settings.sync();
-                comboBox.currentIndex = comboBox.indexOfValue(settingsDialog.userDefaultModel);
+                comboBox.currentIndex = comboBox.indexOfValue(MySettings.userDefaultModel);
             }
             Component.onCompleted: {
                 comboBox.updateModel()
             }
             Connections {
-                target: settings
+                target: MySettings
                 function onUserDefaultModelChanged() {
                     comboBox.updateModel()
                 }
             }
             onActivated: {
-                settingsDialog.userDefaultModel = comboBox.currentText
-                settings.sync()
+                MySettings.userDefaultModel = comboBox.currentText
             }
         }
         FolderDialog {
@@ -657,8 +576,7 @@ Dialog {
             onAccepted: {
                 modelPathDisplayField.text = selectedFolder
                 ModelList.localModelsPath = modelPathDisplayField.text
-                settings.modelPath = ModelList.localModelsPath
-                settings.sync()
+                MySettings.modelPath = ModelList.localModelsPath
             }
         }
         Label {
@@ -683,8 +601,7 @@ Dialog {
             onEditingFinished: {
                 if (isValid) {
                     ModelList.localModelsPath = modelPathDisplayField.text
-                    settings.modelPath = ModelList.localModelsPath
-                    settings.sync()
+                    MySettings.modelPath = ModelList.localModelsPath
                 } else {
                     text = ModelList.localModelsPath
                 }
@@ -705,7 +622,7 @@ Dialog {
             Layout.column: 0
         }
         MyTextField {
-            text: settingsDialog.threadCount.toString()
+            text: MySettings.threadCount
             color: theme.textColor
             ToolTip.text: qsTr("Amount of processing threads to use, a setting of 0 will use the lesser of 4 or your number of CPU threads")
             ToolTip.visible: hovered
@@ -717,12 +634,11 @@ Dialog {
             onEditingFinished: {
                 var val = parseInt(text)
                 if (!isNaN(val)) {
-                    settingsDialog.threadCount = val
+                    MySettings.threadCount = val
                     LLM.threadCount = val
-                    settings.sync()
                     focus = false
                 } else {
-                    text = settingsDialog.threadCount.toString()
+                    text = MySettings.threadCount
                 }
             }
             Accessible.role: Accessible.EditableText
@@ -740,12 +656,11 @@ Dialog {
             id: saveChatsBox
             Layout.row: 4
             Layout.column: 1
-            checked: settingsDialog.saveChats
+            checked: MySettings.saveChats
             onClicked: {
                 Network.sendSaveChatsToggled(saveChatsBox.checked);
-                settingsDialog.saveChats = saveChatsBox.checked
+                MySettings.saveChats = !MySettings.saveChats
                 ChatListModel.shouldSaveChats = saveChatsBox.checked
-                settings.sync()
             }
             ToolTip.text: qsTr("WARNING: Saving chats to disk can be ~2GB per chat")
             ToolTip.visible: hovered
@@ -761,11 +676,10 @@ Dialog {
             id: saveChatGPTChatsBox
             Layout.row: 5
             Layout.column: 1
-            checked: settingsDialog.saveChatGPTChats
+            checked: MySettings.saveChatGPTChats
             onClicked: {
-                settingsDialog.saveChatGPTChats = saveChatGPTChatsBox.checked
+                MySettings.saveChatGPTChats = !MySettings.saveChatGPTChats
                 ChatListModel.shouldSaveChatGPTChats = saveChatGPTChatsBox.checked
-                settings.sync()
             }
         }
         Label {
@@ -779,11 +693,10 @@ Dialog {
             id: serverChatBox
             Layout.row: 6
             Layout.column: 1
-            checked: settings.serverChat
+            checked: MySettings.serverChat
             onClicked: {
-                settingsDialog.serverChat = serverChatBox.checked
+                MySettings.serverChat = !MySettings.serverChat
                 LLM.serverEnabled = serverChatBox.checked
-                settings.sync()
             }
             ToolTip.text: qsTr("WARNING: This enables the gui to act as a local REST web server(OpenAI API compliant) for API requests and will increase your RAM usage as well")
             ToolTip.visible: hovered