2023-05-31 21:04:01 +00:00
|
|
|
#include "llmodel.h"

#include "dlhandle.h"
#include "sysinfo.h"

#include <cassert>
#include <cstdlib>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <system_error>
#include <vector>

#ifdef _MSC_VER
#include <windows.h>
#include <processthreadsapi.h>
#endif
|
2023-05-31 21:04:01 +00:00
|
|
|
|
2023-06-05 15:19:02 +00:00
|
|
|
// Semicolon-separated list of directories scanned by implementationList();
// mutable at runtime via LLMImplementation::setImplementationsSearchPath().
std::string s_implementations_search_path = ".";
|
2023-06-02 14:57:21 +00:00
|
|
|
|
2023-06-04 19:28:58 +00:00
|
|
|
// Returns true when the CPU meets the minimum requirement of every shipped
// build variant: AVX on x86-64. Non-x86-64 targets are assumed capable.
static bool has_at_least_minimal_hardware() {
#if !defined(__x86_64__)
    // No probe available for other architectures; assume the hardware is fine.
    return true;
#elif defined(_MSC_VER)
    return IsProcessorFeaturePresent(PF_AVX_INSTRUCTIONS_AVAILABLE);
#else
    return __builtin_cpu_supports("avx");
#endif
}
|
|
|
|
|
2023-06-01 11:57:10 +00:00
|
|
|
// Returns true when the CPU lacks AVX2 and therefore must use the
// "avxonly" build variant. Non-x86-64 targets never need it.
static bool requires_avxonly() {
#if !defined(__x86_64__)
    // No probe available for other architectures; assume AVX2-capable builds work.
    return false;
#elif defined(_MSC_VER)
    return !IsProcessorFeaturePresent(PF_AVX2_INSTRUCTIONS_AVAILABLE);
#else
    return !__builtin_cpu_supports("avx2");
#endif
}
|
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
/// Wraps a loaded backend library: resolves and caches the entry points
/// (model type, build variant, magic_match, construct) that every
/// implementation library must export. Takes ownership of the handle.
LLMImplementation::LLMImplementation(Dlhandle &&dlhandle_)
    : m_dlhandle(new Dlhandle(std::move(dlhandle_))) {
    auto get_model_type = m_dlhandle->get<const char *()>("get_model_type");
    assert(get_model_type);
    m_modelType = get_model_type();
    auto get_build_variant = m_dlhandle->get<const char *()>("get_build_variant");
    assert(get_build_variant);
    m_buildVariant = get_build_variant();
    m_magicMatch = m_dlhandle->get<bool(std::ifstream&)>("magic_match");
    // Fix: was `assert(magicMatch)` — an undeclared identifier that only
    // compiled because NDEBUG builds strip asserts; debug builds broke.
    assert(m_magicMatch);
    m_construct = m_dlhandle->get<LLModel *()>("construct");
    // Fix: was `assert(construct_)` — same undeclared-identifier problem.
    assert(m_construct);
}
|
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
/// Move constructor: copies the cached entry points and steals the library
/// handle so only one object ever deletes it.
LLMImplementation::LLMImplementation(LLMImplementation &&o)
    : m_magicMatch(o.m_magicMatch),
      m_construct(o.m_construct),
      m_modelType(o.m_modelType),
      m_buildVariant(o.m_buildVariant),
      m_dlhandle(o.m_dlhandle) {
    // The moved-from object must not delete the handle in its destructor.
    o.m_dlhandle = nullptr;
}
|
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
/// Releases the dynamic library handle (null after a move, in which case
/// `delete` is a harmless no-op — no explicit guard required).
LLMImplementation::~LLMImplementation() {
    delete m_dlhandle;
}
|
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
bool LLMImplementation::isImplementation(const Dlhandle &dl) {
|
2023-05-31 19:37:25 +00:00
|
|
|
return dl.get<bool(uint32_t)>("is_g4a_backend_model_implementation");
|
|
|
|
}
|
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
const std::vector<LLMImplementation> &LLMImplementation::implementationList() {
|
2023-06-01 11:57:10 +00:00
|
|
|
// NOTE: allocated on heap so we leak intentionally on exit so we have a chance to clean up the
|
|
|
|
// individual models without the cleanup of the static list interfering
|
2023-07-08 14:04:38 +00:00
|
|
|
static auto* libs = new std::vector<LLMImplementation>([] () {
|
|
|
|
std::vector<LLMImplementation> fres;
|
2023-05-31 21:04:01 +00:00
|
|
|
|
2023-06-05 13:23:17 +00:00
|
|
|
auto search_in_directory = [&](const std::string& paths) {
|
|
|
|
std::stringstream ss(paths);
|
|
|
|
std::string path;
|
|
|
|
// Split the paths string by the delimiter and process each path.
|
|
|
|
while (std::getline(ss, path, ';')) {
|
|
|
|
std::filesystem::path fs_path(path);
|
|
|
|
// Iterate over all libraries
|
|
|
|
for (const auto& f : std::filesystem::directory_iterator(fs_path)) {
|
|
|
|
const std::filesystem::path& p = f.path();
|
|
|
|
if (p.extension() != LIB_FILE_EXT) continue;
|
|
|
|
// Add to list if model implementation
|
|
|
|
try {
|
|
|
|
Dlhandle dl(p.string());
|
2023-07-08 14:04:38 +00:00
|
|
|
if (!LLMImplementation::isImplementation(dl)) {
|
2023-06-05 13:23:17 +00:00
|
|
|
continue;
|
|
|
|
}
|
2023-07-08 14:04:38 +00:00
|
|
|
fres.emplace_back(LLMImplementation(std::move(dl)));
|
2023-06-05 13:23:17 +00:00
|
|
|
} catch (...) {}
|
|
|
|
}
|
2023-05-31 21:04:01 +00:00
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2023-06-05 15:19:02 +00:00
|
|
|
search_in_directory(s_implementations_search_path);
|
2023-06-05 13:23:17 +00:00
|
|
|
|
2023-05-31 21:04:01 +00:00
|
|
|
return fres;
|
|
|
|
}());
|
2023-05-31 19:37:25 +00:00
|
|
|
// Return static result
|
|
|
|
return *libs;
|
|
|
|
}
|
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
/// Linear scan over the implementation list: returns the first entry whose
/// magic check accepts the stream AND whose build variant matches, or
/// nullptr when none does. Evaluation order (magic first, then variant) is
/// preserved via short-circuiting.
const LLMImplementation* LLMImplementation::implementation(std::ifstream& f, const std::string& buildVariant) {
    for (const auto& candidate : implementationList()) {
        f.seekg(0); // every magic_match must see the file from the beginning
        if (candidate.m_magicMatch(f) && buildVariant == candidate.m_buildVariant)
            return &candidate;
    }
    return nullptr;
}
|
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
/// Factory: picks the best backend for the model file at `modelPath` and
/// returns a newly constructed LLModel (caller owns it), or nullptr when the
/// hardware, file, or variant cannot be satisfied.
LLModel *LLMImplementation::construct(const std::string &modelPath, std::string buildVariant) {

    // Refuse to run on CPUs below the minimum bar (AVX on x86-64).
    if (!has_at_least_minimal_hardware())
        return nullptr;

    // Read magic
    std::ifstream f(modelPath, std::ios::binary);
    if (!f) return nullptr;
    // Get correct implementation
    const LLMImplementation* impl = nullptr;

#if defined(__APPLE__) && defined(__arm64__) // FIXME: See if metal works for intel macs
    if (buildVariant == "auto") {
        size_t total_mem = getSystemTotalRAMInBytes();
        impl = implementation(f, "metal");
        if(impl) {
            // Fix: was `impl->construct()`, which resolves to this static
            // factory (and lacks its required modelPath argument); the loaded
            // library's factory pointer m_construct is what is meant, exactly
            // as in the generic path below.
            LLModel* metalimpl = impl->m_construct();
            size_t req_mem = metalimpl->requiredMem(modelPath);
            float req_to_total = (float) req_mem / (float) total_mem;
            // on a 16GB M2 Mac a 13B q4_0 (0.52) works for me but a 13B q4_K_M (0.55) does not
            if (req_to_total >= 0.53) {
                delete metalimpl;
                impl = nullptr;
            } else {
                // Fix: the generic path records the owning implementation on
                // the model before returning; this early return did not.
                metalimpl->m_implementation = impl;
                return metalimpl;
            }
        }
    }
#endif

    if (!impl) {
        //TODO: Auto-detect CUDA/OpenCL
        if (buildVariant == "auto") {
            buildVariant = requires_avxonly() ? "avxonly" : "default";
        }
        impl = implementation(f, buildVariant);
        if (!impl) return nullptr;
    }
    f.close();

    // Construct and return llmodel implementation
    auto fres = impl->m_construct();
    fres->m_implementation = impl;
    return fres;
}
|
2023-06-05 15:19:02 +00:00
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
void LLMImplementation::setImplementationsSearchPath(const std::string& path) {
|
2023-06-05 15:19:02 +00:00
|
|
|
s_implementations_search_path = path;
|
|
|
|
}
|
|
|
|
|
2023-07-08 14:04:38 +00:00
|
|
|
const std::string& LLMImplementation::implementationsSearchPath() {
|
2023-06-05 15:19:02 +00:00
|
|
|
return s_implementations_search_path;
|
|
|
|
}
|