Mirror of https://github.com/nomic-ai/gpt4all, synced 2024-11-18 03:25:46 +00:00
load all model libs
This commit is contained in: parent ae42805d49 · commit 9f203c211f
@@ -5,6 +5,7 @@ import platform
 import re
 import subprocess
 import sys
+import glob
 
 class DualStreamProcessor:
     def __init__(self, stream=None):
@@ -38,21 +39,27 @@ def load_llmodel_library():
     c_lib_ext = get_c_shared_lib_extension()
 
     llmodel_file = "libllmodel" + '.' + c_lib_ext
+    model_lib_files = glob.glob(f"lib*.{c_lib_ext}")
+    model_lib_dirs = []
+
+    for lib in model_lib_files:
+        if lib != llmodel_file:
+            model_lib_dirs.append(str(pkg_resources.resource_filename('gpt4all', \
+                os.path.join(LLMODEL_PATH, lib))).replace("\\", "\\\\"))
 
     llama_file = "libllama" + '.' + c_lib_ext
     llama_dir = str(pkg_resources.resource_filename('gpt4all', os.path.join(LLMODEL_PATH, llama_file)))
-    llmodel_dir = str(pkg_resources.resource_filename('gpt4all', os.path.join(LLMODEL_PATH, llmodel_file)))
-
-    # For windows
-    llama_dir = llama_dir.replace("\\", "\\\\")
-    llmodel_dir = llmodel_dir.replace("\\", "\\\\")
+    llmodel_dir = str(pkg_resources.resource_filename('gpt4all', \
+        os.path.join(LLMODEL_PATH, llmodel_file))).replace("\\", "\\\\")
+    model_libs = []
+    for model_dir in model_lib_dirs:
+        model_libs.append(ctypes.CDLL(model_dir, mode=ctypes.RTLD_GLOBAL))
 
-    llama_lib = ctypes.CDLL(llama_dir, mode=ctypes.RTLD_GLOBAL)
     llmodel_lib = ctypes.CDLL(llmodel_dir)
 
-    return llmodel_lib, llama_lib
+    return llmodel_lib, model_libs
 
-llmodel, llama = load_llmodel_library()
+llmodel, model_libs = load_llmodel_library()
 
 class LLModelError(ctypes.Structure):
     _fields_ = [("message", ctypes.c_char_p),
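For readers skimming the patch, here is a minimal, self-contained sketch of the loading pattern it introduces. The load_all_model_libs and shared_lib_extension names and the lib_dir argument are illustrative, not part of the commit: every lib*.<ext> shared library found beside libllmodel is loaded with ctypes.RTLD_GLOBAL so its symbols stay visible process-wide, then libllmodel itself is loaded and returned together with the backend handles.

import ctypes
import glob
import os
import platform


def shared_lib_extension():
    # Illustrative stand-in for get_c_shared_lib_extension() in the patched module.
    return {"Darwin": "dylib", "Windows": "dll"}.get(platform.system(), "so")


def load_all_model_libs(lib_dir):
    """Load every backend shared library in lib_dir, then libllmodel itself."""
    ext = shared_lib_extension()
    llmodel_name = f"libllmodel.{ext}"

    model_libs = []
    for path in sorted(glob.glob(os.path.join(lib_dir, f"lib*.{ext}"))):
        if os.path.basename(path) == llmodel_name:
            continue
        # RTLD_GLOBAL publishes each backend's symbols process-wide, so the
        # main libllmodel library can resolve against them when loaded next.
        model_libs.append(ctypes.CDLL(path, mode=ctypes.RTLD_GLOBAL))

    llmodel_lib = ctypes.CDLL(os.path.join(lib_dir, llmodel_name))
    return llmodel_lib, model_libs


if __name__ == "__main__":
    # Hypothetical directory; in the bindings the paths come from
    # pkg_resources.resource_filename('gpt4all', ...) with LLMODEL_PATH.
    llmodel, model_libs = load_all_model_libs("./build/lib")
    print(f"loaded libllmodel plus {len(model_libs)} backend libraries")

Loading the backends with RTLD_GLOBAL (rather than the default RTLD_LOCAL) is what lets libllmodel resolve the implementation symbols when it is opened afterwards; keeping the CDLL handles in the returned model_libs list also keeps those libraries loaded for the lifetime of the module.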