Prefer 7b models in order of default model load.

Adam Treat 2023-07-12 12:45:08 -04:00 committed by AT
parent 5df4f1bf8c
commit 60627bd41f
2 changed files with 4 additions and 36 deletions


@@ -161,16 +161,6 @@ int InstalledModels::count() const
return rowCount();
}
QString InstalledModels::firstId() const
{
if (rowCount() > 0) {
QModelIndex firstIndex = index(0, 0);
return sourceModel()->data(firstIndex, ModelList::IdRole).toString();
} else {
return QString();
}
}
DownloadableModels::DownloadableModels(QObject *parent)
: QSortFilterProxyModel(parent)
, m_expanded(false)
@@ -298,12 +288,9 @@ ModelInfo ModelList::defaultModelInfo() const
settings.sync();
// The user default model can be set by the user in the settings dialog. The "default" user
// default model is "Application default" which signals we should use the default model that was
// specified by the models.json file.
// default model is "Application default" which signals we should use the logic here.
const QString userDefaultModelName = MySettings::globalInstance()->userDefaultModel();
const bool hasUserDefaultName = !userDefaultModelName.isEmpty() && userDefaultModelName != "Application default";
const QString defaultModelName = settings.value("defaultModel").toString();
const bool hasDefaultName = hasUserDefaultName ? false : !defaultModelName.isEmpty();
ModelInfo *defaultModel = nullptr;
for (ModelInfo *info : m_models) {
@@ -311,12 +298,10 @@ ModelInfo ModelList::defaultModelInfo() const
continue;
defaultModel = info;
// If we don't have either setting, then just use the first model that is installed
if (!hasUserDefaultName && !hasDefaultName)
break;
const size_t ramrequired = defaultModel->ramrequired;
// If we don't have a user specified default, but *do* have a default setting and match, then use it
if (!hasUserDefaultName && hasDefaultName && (defaultModel->id() == defaultModelName))
// If we don't have either setting, then just use the first model that requires less than 16GB that is installed
if (!hasUserDefaultName && !info->isChatGPT && ramrequired > 0 && ramrequired < 16)
break;
// If we have a user specified default and match, then use it
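Taken together, this hunk drops the persisted-default branch and makes defaultModelInfo() prefer the first installed local model whose declared RAM requirement is under 16 GB, which in practice means the 7B-class quantized models named in the commit title. Below is a minimal sketch of how the selection loop reads after the change, assembled from the lines shown above; the installed guard and the final user-default comparison against userDefaultModelName are assumed from surrounding context rather than shown in this diff.

// Sketch of the post-change loop in ModelList::defaultModelInfo().
// Lines marked "assumed" are not visible in the hunk above.
ModelInfo *defaultModel = nullptr;
for (ModelInfo *info : m_models) {
    if (!info->installed)      // assumed guard: skip models that are not downloaded
        continue;
    defaultModel = info;
    const size_t ramrequired = defaultModel->ramrequired;

    // If we don't have either setting, then just use the first model that
    // requires less than 16GB that is installed.
    if (!hasUserDefaultName && !info->isChatGPT && ramrequired > 0 && ramrequired < 16)
        break;

    // If we have a user specified default and match, then use it.
    if (hasUserDefaultName && defaultModel->name() == userDefaultModelName) // assumed comparison
        break;
}

Because ramrequired is compared directly against 16, the threshold is expressed in gigabytes; models with an unknown requirement (ramrequired == 0) fail the first test, so the loop keeps scanning past them.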
@@ -847,14 +832,6 @@ void ModelList::updateModelsFromDirectory()
processDirectory(exePath);
if (localPath != exePath)
processDirectory(localPath);
if (installedModels()->count()) {
const QString firstModel =
installedModels()->firstId();
QSettings settings;
settings.setValue("defaultModel", firstModel);
settings.sync();
}
}
void ModelList::updateModelsFromJson()
@@ -1132,14 +1109,6 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
updateData(id, ModelList::QuantRole, "NA");
updateData(id, ModelList::TypeRole, "GPT");
}
if (installedModels()->count()) {
const QString firstModel =
installedModels()->firstId();
QSettings settings;
settings.setValue("defaultModel", firstModel);
settings.sync();
}
}
void ModelList::updateModelsFromSettings()
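With the two blocks above removed, neither the local-directory scan nor the models.json parse writes a "defaultModel" key into QSettings any longer, and nothing reads it back: the fallback is computed on demand inside defaultModelInfo(). A hedged sketch of how a caller can obtain the default after this change; the helper name resolveDefaultModelId is hypothetical and only illustrates that the settings round-trip is gone.

// Hypothetical helper: the default is derived on the fly by defaultModelInfo(),
// not read back from a persisted "defaultModel" QSettings entry.
QString resolveDefaultModelId(ModelList *modelList)
{
    return modelList->defaultModelInfo().id();
}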


@@ -127,7 +127,6 @@ class InstalledModels : public QSortFilterProxyModel
public:
explicit InstalledModels(QObject *parent);
int count() const;
QString firstId() const;
Q_SIGNALS:
void countChanged();
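Since firstId() only existed to feed the removed settings writes, the proxy model shrinks accordingly. An abridged sketch of the declaration after this commit, limited to the members visible in the hunk above (the Q_OBJECT macro is assumed, and any properties or filter overrides of the real class are omitted):

// Abridged InstalledModels declaration after removing firstId().
class InstalledModels : public QSortFilterProxyModel
{
    Q_OBJECT   // assumed; required for the signal below
public:
    explicit InstalledModels(QObject *parent);
    int count() const;

Q_SIGNALS:
    void countChanged();
};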