From 6937b970e50ba0e696529149a97b0875d4de6a86 Mon Sep 17 00:00:00 2001
From: Fangyin Cheng
Date: Wed, 14 May 2025 10:49:53 +0800
Subject: [PATCH] feat(model): Support llama.cpp for GLM4-z1 (#2690)

---
 packages/dbgpt-core/pyproject.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/dbgpt-core/pyproject.toml b/packages/dbgpt-core/pyproject.toml
index 353a810ce..1d0645737 100644
--- a/packages/dbgpt-core/pyproject.toml
+++ b/packages/dbgpt-core/pyproject.toml
@@ -118,8 +118,8 @@ llama_cpp = [
     "llama-cpp-python",
 ]
 llama_cpp_server = [
-    "llama-cpp-server-py-core>=0.1.3",
-    "llama-cpp-server-py>=0.1.3",
+    "llama-cpp-server-py-core>=0.1.4",
+    "llama-cpp-server-py>=0.1.4",
 ]
 proxy_ollama = ["ollama"]
 proxy_zhipuai = ["zhipuai>=2.1.5"]
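
Note: the sketch below shows how a downstream project might opt into the bumped llama.cpp server extra after this patch. The distribution name "dbgpt-core", the extra name "llama_cpp_server", and the project name "my-dbgpt-app" are assumptions based on the repository layout and the extras table touched above, not verified published names.

# Hypothetical downstream pyproject.toml: depend on the llama_cpp_server extra
# so the >=0.1.4 server bindings (the versions this patch requires for GLM4-z1
# support) get pulled in transitively.
[project]
name = "my-dbgpt-app"          # assumed placeholder project
version = "0.1.0"
dependencies = [
    "dbgpt-core[llama_cpp_server]",   # distribution/extra names assumed from repo layout
]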