mirror of
https://github.com/nomic-ai/gpt4all.git
synced 2025-06-28 16:27:31 +00:00
ollama-hpp immediately segfaulted. will try something else
Signed-off-by: Jared Van Bortel <jared@nomic.ai>
This commit is contained in:
parent
f4a350d606
commit
729a5b0d9f
8
gpt4all-backend-test/CMakeLists.txt
Normal file
8
gpt4all-backend-test/CMakeLists.txt
Normal file
@ -0,0 +1,8 @@
|
||||
# Top-level build for the standalone gpt4all backend test harness.
# NOTE(review): the scraped rendering interleaved table-border artifacts
# ("|" / "||||") between these lines; they are not part of the file and
# are dropped here.

cmake_minimum_required(VERSION 3.29)
project(gpt4all-backend-test VERSION 0.1 LANGUAGES CXX)

# Collect every built binary under a single bin/ directory.
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin")
include(../common/common.cmake)

# The backend lives outside this source tree, so add_subdirectory()
# requires an explicit binary directory.
add_subdirectory(../gpt4all-backend "${CMAKE_CURRENT_BINARY_DIR}/gpt4all-backend")
add_subdirectory(src)
|
21
gpt4all-backend-test/src/CMakeLists.txt
Normal file
21
gpt4all-backend-test/src/CMakeLists.txt
Normal file
@ -0,0 +1,21 @@
|
||||
# Builds the test-backend executable, which exercises the gpt4all backend
# against an Ollama server whose URL is injected at configure time.
# NOTE(review): scrape artifacts ("|" / "||||") between lines are dropped.

set(TARGET test-backend)

# Substitute @G4A_TEST_OLLAMA_URL@ into the generated config module source.
configure_file(config.cppm.in "${CMAKE_CURRENT_BINARY_DIR}/config.cppm")

add_executable(${TARGET}
    main.cpp
)
target_compile_features(${TARGET} PUBLIC cxx_std_23)
if (CMAKE_COMPILER_IS_GNUCXX)
    # GCC needs an explicit flag to enable C++ modules support.
    target_compile_options(${TARGET} PUBLIC -fmodules-ts)
endif()

# Register the generated config.cppm as a C++ module source; BASE_DIRS must
# cover the binary dir since the file is generated there.
target_sources(${TARGET} PRIVATE
    FILE_SET gpt4all_backend TYPE CXX_MODULES BASE_DIRS
        "${CMAKE_CURRENT_BINARY_DIR}"
    FILES
        "${CMAKE_CURRENT_BINARY_DIR}/config.cppm"
)
gpt4all_add_warning_options(${TARGET})
target_link_libraries(${TARGET} PRIVATE
    gpt4all-backend
)
|
7
gpt4all-backend-test/src/config.cppm.in
Normal file
7
gpt4all-backend-test/src/config.cppm.in
Normal file
@ -0,0 +1,7 @@
|
||||
// Configure-time template: CMake's configure_file() replaces
// @G4A_TEST_OLLAMA_URL@ with the test's Ollama server URL.
// NOTE(review): scrape artifacts ("|" / "||||") between lines are dropped.
module;

#include <QByteArray>

export module gpt4all.test.config;

// Base URL of the Ollama server the test harness connects to.
export inline QByteArray OLLAMA_URL("@G4A_TEST_OLLAMA_URL@");
|
12
gpt4all-backend-test/src/main.cpp
Normal file
12
gpt4all-backend-test/src/main.cpp
Normal file
@ -0,0 +1,12 @@
|
||||
// Smoke test: connect to the configured Ollama server and print its version.
// NOTE(review): scrape artifacts ("|" / "||||") between lines are dropped.
import fmt;
import gpt4all.backend.main;
import gpt4all.test.config;

#include <QLatin1StringView>


int main()
{
    // OLLAMA_URL is supplied by the generated gpt4all.test.config module.
    LLMProvider provider { QLatin1StringView(OLLAMA_URL) };
    fmt::print("Server version: {}", provider.getVersion());
}
|
@ -2,17 +2,27 @@ module;
// NOTE(review): this span is a rendered unified-diff hunk scraped from a web
// page. The "|" and "||||" lines are table-border artifacts, and old
// (removed) and new (added) lines are interleaved with no +/- markers —
// do not treat this span as compilable source.
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <QString>
|
||||
#include <QLatin1StringView>
|
||||
|
||||
#include <ollama.hpp>
|
||||
|
||||
module gpt4all.backend.main;
|
||||
|
||||
import fmt;
|
||||
|
||||
// Ctor: copies the Latin-1 view into the owned std::string, then constructs
// the Ollama client against that URL.
LLMProvider::LLMProvider(QLatin1StringView serverUrl)
|
||||
: m_serverUrl(serverUrl.data(), serverUrl.size())
|
||||
, m_ollama(std::make_unique<Ollama>(m_serverUrl))
|
||||
{}
|
||||
|
||||
// NOTE(review): qstringToSTL below appears to be the OLD (removed) side of
// the diff; its replacement is the defaulted destructor — confirm in repo.
std::string LLMProvider::qstringToSTL(const QString &s)
|
||||
// Out-of-line defaulted dtor: unique_ptr<Ollama> needs the complete Ollama
// type (only forward-declared in the interface) at destruction.
LLMProvider::~LLMProvider() = default;
|
||||
|
||||
// Re-points both the cached URL string and the Ollama client at a new server.
void LLMProvider::setServerUrl(QLatin1StringView serverUrl)
|
||||
{
|
||||
// NOTE(review): the fmt::format("foo") call and the return below look like
// removed old-side lines of the diff, not part of the new body — verify.
fmt::format("{}", "foo");
|
||||
return s.toStdString();
|
||||
m_serverUrl.assign(serverUrl.data(), serverUrl.size());
|
||||
m_ollama->setServerURL(m_serverUrl);
|
||||
}
|
||||
|
||||
// Asks the connected Ollama server for its version string.
QByteArray LLMProvider::getVersion()
|
||||
{
|
||||
return QByteArray(m_ollama->get_version());
|
||||
}
|
||||
|
@ -1,12 +1,27 @@
// NOTE(review): rendered unified-diff hunk; "|" / "||||" lines are scrape
// artifacts and some lines may be the removed (old) side of the diff —
// do not treat this span as compilable source.
|
||||
module;
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
|
||||
#include <QString>
|
||||
#include <QLatin1StringView>
|
||||
|
||||
export module gpt4all.backend.main;
|
||||
|
||||
// Forward declaration keeps ollama.hpp out of the module interface; the
// implementation unit includes the full header.
class Ollama;
|
||||
|
||||
|
||||
// Thin wrapper around an Ollama client: owns the server URL (as a
// std::string) and the client instance.
export class LLMProvider {
|
||||
// NOTE(review): this helper appears to be the OLD (removed) side of the
// diff — the corresponding definition is removed in the implementation
// hunk. Confirm against the repository.
static std::string qstringToSTL(const QString &s);
|
||||
public:
|
||||
LLMProvider(QLatin1StringView serverUrl);
|
||||
~LLMProvider();
|
||||
|
||||
// URL accessors; the view returned by serverUrl() borrows m_serverUrl.
QLatin1StringView serverUrl() const { return QLatin1StringView(m_serverUrl); }
|
||||
void setServerUrl(QLatin1StringView serverUrl);
|
||||
|
||||
/// Retrieve the Ollama version, e.g. "0.5.1"
QByteArray getVersion();
|
||||
|
||||
private:
|
||||
std::string m_serverUrl;
|
||||
// Owned client; incomplete type here, so ~LLMProvider is defined
// out-of-line where Ollama is complete.
std::unique_ptr<Ollama> m_ollama;
|
||||
};
|
||||
|
Loading…
Reference in New Issue
Block a user