From 048fb6c40276a69564e5aa49d646fd6f0b1acdc9 Mon Sep 17 00:00:00 2001
From: xtyuns
Date: Mon, 25 Dec 2023 20:00:04 +0800
Subject: [PATCH] fix: default proxyllm generator function (#971)

---
 dbgpt/model/llm_out/proxy_llm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dbgpt/model/llm_out/proxy_llm.py b/dbgpt/model/llm_out/proxy_llm.py
index 14b2c3177..390ef4886 100644
--- a/dbgpt/model/llm_out/proxy_llm.py
+++ b/dbgpt/model/llm_out/proxy_llm.py
@@ -34,7 +34,7 @@ def proxyllm_generate_stream(
     model_name = model_params.model_name
     default_error_message = f"{model_name} LLM is not supported"
     generator_function = generator_mapping.get(
-        model_name, lambda: default_error_message
+        model_name, lambda *args: [default_error_message]
     )
 
     yield from generator_function(model, tokenizer, params, device, context_len)
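
Note on the fix: proxyllm_generate_stream ends with
`yield from generator_function(model, tokenizer, params, device, context_len)`,
so when the requested model is missing from generator_mapping the fallback must
(a) accept the same five positional arguments and (b) return an iterable whose
items are whole messages. The old `lambda: default_error_message` raised a
TypeError the moment it was called with five arguments, and even with matching
arity, yielding from a bare string would emit the error one character at a time.
The following is a minimal, self-contained sketch (placeholder model name and
dummy arguments, not the actual DB-GPT runtime) demonstrating both behaviors:

    def demo(fallback):
        # Pretend no proxy model matched, so .get() returns the fallback.
        generator_mapping = {}
        default_error_message = "unknown-model LLM is not supported"
        generator_function = generator_mapping.get("unknown-model", fallback)
        # Mirrors the call site in proxyllm_generate_stream:
        # yield from generator_function(model, tokenizer, params, device, context_len)
        yield from generator_function(None, None, {}, "cpu", 4096)

    # Old fallback: TypeError, the lambda takes 0 arguments but is called with 5.
    try:
        list(demo(lambda: "unknown-model LLM is not supported"))
    except TypeError as e:
        print(e)

    # New fallback: accepts any arguments and returns a one-element iterable,
    # so the stream yields the full error message once instead of crashing.
    print(list(demo(lambda *args: ["unknown-model LLM is not supported"])))

Wrapping the message in a list means downstream streaming consumers receive it
as a single chunk, the same shape a real generator function would produce.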