chat: set search path early

This fixes the issues with installed versions of v2.6.0.
This commit is contained in:
Jared Van Bortel
2024-01-11 12:02:39 -05:00
parent f7aeeca884
commit 7e9786fccf
5 changed files with 27 additions and 27 deletions

View File

@@ -192,7 +192,7 @@ LLModel *LLModel::Implementation::construct(const std::string &modelPath, std::s
return fres;
}
-LLModel *LLModel::Implementation::constructCpuLlama() {
+LLModel *LLModel::Implementation::constructDefaultLlama() {
const LLModel::Implementation *impl = nullptr;
for (const auto &i : implementationList()) {
if (i.m_buildVariant == "metal" || i.m_modelType != "LLaMA") continue;
@@ -208,8 +208,8 @@ LLModel *LLModel::Implementation::constructCpuLlama() {
}
std::vector<LLModel::GPUDevice> LLModel::Implementation::availableGPUDevices() {
-    static LLModel *cpuLlama = LLModel::Implementation::constructCpuLlama(); // (memory leak)
-    if (cpuLlama) { return cpuLlama->availableGPUDevices(0); }
+    static LLModel *llama = LLModel::Implementation::constructDefaultLlama(); // (memory leak)
+    if (llama) { return llama->availableGPUDevices(0); }
return {};
}

View File

@@ -43,7 +43,7 @@ public:
static const std::string& implementationsSearchPath();
private:
-    static LLModel *constructCpuLlama();
+    static LLModel *constructDefaultLlama();
bool (*m_magicMatch)(const char *fname);
LLModel *(*m_construct)();