ollama-hpp immediately segfaulted. will try something else

Signed-off-by: Jared Van Bortel <jared@nomic.ai>
Jared Van Bortel 2025-02-18 17:23:19 -05:00
parent f4a350d606
commit 729a5b0d9f
6 changed files with 80 additions and 7 deletions


@ -0,0 +1,8 @@
cmake_minimum_required(VERSION 3.29)
project(gpt4all-backend-test VERSION 0.1 LANGUAGES CXX)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin")
include(../common/common.cmake)
add_subdirectory(../gpt4all-backend "${CMAKE_CURRENT_BINARY_DIR}/gpt4all-backend")
add_subdirectory(src)


@ -0,0 +1,21 @@
set(TARGET test-backend)
configure_file(config.cppm.in "${CMAKE_CURRENT_BINARY_DIR}/config.cppm")
add_executable(${TARGET}
main.cpp
)
target_compile_features(${TARGET} PUBLIC cxx_std_23)
if (CMAKE_COMPILER_IS_GNUCXX)
target_compile_options(${TARGET} PUBLIC -fmodules-ts)
endif()
target_sources(${TARGET} PRIVATE
FILE_SET gpt4all_backend TYPE CXX_MODULES BASE_DIRS
"${CMAKE_CURRENT_BINARY_DIR}"
FILES
"${CMAKE_CURRENT_BINARY_DIR}/config.cppm"
)
gpt4all_add_warning_options(${TARGET})
target_link_libraries(${TARGET} PRIVATE
gpt4all-backend
)


@ -0,0 +1,7 @@
module;
#include <QByteArray>
export module gpt4all.test.config;
export inline QByteArray OLLAMA_URL("@G4A_TEST_OLLAMA_URL@");
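For reference, once configure_file substitutes @G4A_TEST_OLLAMA_URL@, the generated config.cppm would read as below. The URL shown is only an assumed example (Ollama's conventional local endpoint); the real value comes from the G4A_TEST_OLLAMA_URL CMake variable, which is presumably defined elsewhere in the build configuration and not shown in this commit.
module;
#include <QByteArray>
export module gpt4all.test.config;
// assumed example value; substituted from the G4A_TEST_OLLAMA_URL CMake variable
export inline QByteArray OLLAMA_URL("http://localhost:11434");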


@ -0,0 +1,12 @@
import fmt;
import gpt4all.backend.main;
import gpt4all.test.config;
#include <QLatin1StringView>
int main()
{
LLMProvider provider { QLatin1StringView(OLLAMA_URL) };
fmt::print("Server version: {}", provider.getVersion());
}


@ -2,17 +2,27 @@ module;
#include <string>
#include <QString>
#include <QLatin1StringView>
#include <ollama.hpp>
module gpt4all.backend.main;
import fmt;
LLMProvider::LLMProvider(QLatin1StringView serverUrl)
: m_serverUrl(serverUrl.data(), serverUrl.size())
, m_ollama(std::make_unique<Ollama>(m_serverUrl))
{}
-std::string LLMProvider::qstringToSTL(const QString &s)
+LLMProvider::~LLMProvider() = default;
+void LLMProvider::setServerUrl(QLatin1StringView serverUrl)
 {
-    fmt::format("{}", "foo");
-    return s.toStdString();
+    m_serverUrl.assign(serverUrl.data(), serverUrl.size());
+    m_ollama->setServerURL(m_serverUrl);
 }
QByteArray LLMProvider::getVersion()
{
return QByteArray(m_ollama->get_version());
}
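For context on the reported segfault, here is a minimal non-Qt sketch exercising the same two ollama-hpp calls the wrapper makes: constructing Ollama from a server URL string and calling get_version(). This is a debugging aid, not code from the commit; the URL literal is an assumed example, and it is assumed get_version() returns a printable string.
// Minimal repro sketch: the same ollama-hpp calls as LLMProvider above,
// without Qt or C++ modules in the way. Assumes <ollama.hpp> is on the
// include path; the URL below is an assumed example endpoint.
#include <iostream>
#include <ollama.hpp>
int main()
{
    Ollama client("http://localhost:11434");                          // mirrors make_unique<Ollama>(m_serverUrl)
    std::cout << "Server version: " << client.get_version() << '\n';  // mirrors LLMProvider::getVersion()
}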


@ -1,12 +1,27 @@
module;
#include <memory>
#include <string>
#include <QString>
#include <QLatin1StringView>
export module gpt4all.backend.main;
class Ollama;
export class LLMProvider {
static std::string qstringToSTL(const QString &s);
public:
LLMProvider(QLatin1StringView serverUrl);
~LLMProvider();
QLatin1StringView serverUrl() const { return QLatin1StringView(m_serverUrl); }
void setServerUrl(QLatin1StringView serverUrl);
/// Retrieve the Ollama version, e.g. "0.5.1"
QByteArray getVersion();
private:
std::string m_serverUrl;
std::unique_ptr<Ollama> m_ollama;
};
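One structural note on this header: because Ollama is only forward-declared and owned through std::unique_ptr, ~LLMProvider() has to be declared here and defaulted in the .cpp, where ollama.hpp makes the type complete; an implicitly generated inline destructor would otherwise try to delete an incomplete type. A generic, self-contained sketch of that pattern follows (hypothetical Wrapper/Impl names, not part of this commit).
#include <memory>
// "Header" side: only a forward declaration is visible, like `class Ollama;` above.
class Impl;
class Wrapper {
public:
    Wrapper();
    ~Wrapper();                    // declared here, defined only where Impl is complete
private:
    std::unique_ptr<Impl> m_impl;  // a unique_ptr to an incomplete type is a valid member
};
// "Implementation" side: Impl is now complete, so the defaulted destructor compiles.
class Impl { };
Wrapper::Wrapper() : m_impl(std::make_unique<Impl>()) {}
Wrapper::~Wrapper() = default;
int main()
{
    Wrapper w;  // construct and destroy without ever exposing Impl's definition to users
}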