From a22de08936c5b74d3424a6e7a5f045cec6d6fb1f Mon Sep 17 00:00:00 2001
From: Fangyin Cheng
Date: Tue, 4 Mar 2025 18:45:52 +0800
Subject: [PATCH] chore: Modify ollama config

---
 packages/dbgpt-core/src/dbgpt/model/proxy/llms/ollama.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/dbgpt-core/src/dbgpt/model/proxy/llms/ollama.py b/packages/dbgpt-core/src/dbgpt/model/proxy/llms/ollama.py
index cc820587e..859752af3 100644
--- a/packages/dbgpt-core/src/dbgpt/model/proxy/llms/ollama.py
+++ b/packages/dbgpt-core/src/dbgpt/model/proxy/llms/ollama.py
@@ -9,13 +9,13 @@ from dbgpt.core.awel.flow import (
     ResourceCategory,
     auto_register_resource,
 )
+from dbgpt.core.interface.parameter import LLMDeployModelParameters
 from dbgpt.model.proxy.base import (
     AsyncGenerateStreamFunction,
     GenerateStreamFunction,
     ProxyLLMClient,
     register_proxy_model_adapter,
 )
-from dbgpt.model.proxy.llms.chatgpt import OpenAICompatibleDeployModelParameters
 from dbgpt.model.proxy.llms.proxy_model import ProxyModel, parse_model_request
 from dbgpt.util.i18n_utils import _
 
@@ -31,7 +31,7 @@ logger = logging.getLogger(__name__)
     show_in_ui=False,
 )
 @dataclass
-class OllamaDeployModelParameters(OpenAICompatibleDeployModelParameters):
+class OllamaDeployModelParameters(LLMDeployModelParameters):
     """Deploy model parameters for Ollama."""
 
     provider: str = "proxy/ollama"