Fixes https://github.com/nomic-ai/gpt4all/issues/1760: LLModel ERROR: Could not find CPU LLaMA implementation.
Inspired by the Microsoft docs for LoadLibraryExA (https://learn.microsoft.com/en-us/windows/win32/api/libloaderapi/nf-libloaderapi-loadlibraryexa): when LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR is used, the lpFileName parameter must specify a fully qualified path, and its separators must be backslashes (\), not forward slashes (/).
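The rule above can be condensed into a single helper. A minimal, self-contained sketch (not the committed code; the load_plugin name is illustrative) that fully qualifies the path and flips the separators before calling LoadLibraryExA:

#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <windows.h>
#include <algorithm>
#include <filesystem>
#include <stdexcept>
#include <string>

// Load a DLL with LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR, which requires a fully
// qualified, backslash-separated path.
HMODULE load_plugin(const std::string& fpath) {
    std::string path = std::filesystem::absolute(fpath).string(); // fully qualify
    std::replace(path.begin(), path.end(), '/', '\\');            // force backslashes
    HMODULE h = LoadLibraryExA(path.c_str(), NULL,
                               LOAD_LIBRARY_SEARCH_DEFAULT_DIRS | LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR);
    if (!h) {
        throw std::runtime_error("LoadLibraryExA(\"" + path + "\") failed");
    }
    return h;
}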
parent 3e99b90c0b
commit 38d81c14d0
@@ -54,6 +54,7 @@ public:
 };
 #else
 #include <string>
+#include <algorithm>
 #include <exception>
 #include <stdexcept>
 #ifndef NOMINMAX
@@ -75,7 +76,9 @@ public:
 
     Dlhandle() : chandle(nullptr) {}
     Dlhandle(const std::string& fpath) {
-        chandle = LoadLibraryExA(fpath.c_str(), NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS | LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR);
+        std::string path = fpath;
+        std::replace(path.begin(), path.end(), '/', '\\');
+        chandle = LoadLibraryExA(path.c_str(), NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS | LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR);
         if (!chandle) {
             throw Exception("dlopen(\""+fpath+"\"): Error");
         }
@@ -104,7 +104,7 @@ const std::vector<LLModel::Implementation> &LLModel::Implementation::implementat
 
         // Add to list if model implementation
         try {
-            Dlhandle dl(p.string());
+            Dlhandle dl(std::filesystem::absolute(p).string());
             if (!Implementation::isImplementation(dl)) {
                 continue;
             }
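A side note on the call-site change above (a sketch with a hypothetical path, assuming a Windows build): std::filesystem::absolute fully qualifies a relative path but does not normalize its separators, which is why the separator conversion inside Dlhandle is still needed.

#include <filesystem>
#include <iostream>

int main() {
    // Hypothetical relative plugin path, as produced by scanning a search directory.
    std::filesystem::path p = "implementations/example.dll";
    // absolute() prepends the current working directory but keeps the original
    // separators, e.g. C:\work\implementations/example.dll when run from C:\work.
    std::cout << std::filesystem::absolute(p).string() << '\n';
}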