refactor: Refactor proxy LLM (#1064)

Author: Fangyin Cheng
Date: 2024-01-14 21:01:37 +08:00
Committed by: GitHub
Parent: a035433170
Commit: 22bfd01c4b
95 changed files with 2049 additions and 1294 deletions


@@ -178,14 +178,15 @@ class FalconChatAdapter(BaseChatAdpter):
         return falcon_generate_output
 
 
-class ProxyllmChatAdapter(BaseChatAdpter):
-    def match(self, model_path: str):
-        return "proxyllm" in model_path
-
-    def get_generate_stream_func(self, model_path: str):
-        from dbgpt.model.llm_out.proxy_llm import proxyllm_generate_stream
-
-        return proxyllm_generate_stream
+#
+# class ProxyllmChatAdapter(BaseChatAdpter):
+#     def match(self, model_path: str):
+#         return "proxyllm" in model_path
+#
+#     def get_generate_stream_func(self, model_path: str):
+#         from dbgpt.model.llm_out.proxy_llm import proxyllm_generate_stream
+#
+#         return proxyllm_generate_stream
 
 
 class GorillaChatAdapter(BaseChatAdpter):
@@ -286,6 +287,6 @@ register_llm_model_chat_adapter(LlamaCppChatAdapter)
 register_llm_model_chat_adapter(InternLMChatAdapter)
 
 # Proxy model for test and develop, it's cheap for us now.
-register_llm_model_chat_adapter(ProxyllmChatAdapter)
+# register_llm_model_chat_adapter(ProxyllmChatAdapter)
 
 register_llm_model_chat_adapter(BaseChatAdpter)
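Note on the pattern being removed: each BaseChatAdpter subclass claims a model path via match() and hands back its streaming generate function from get_generate_stream_func(); registered adapters are then scanned in order to pick one. The sketch below is a minimal, self-contained illustration of that registry-and-dispatch flow, not the project's actual module. The module-level adapter list, the get_llm_chat_adapter() dispatcher, and the echo-style stream function are assumptions for illustration; only BaseChatAdpter, match(), get_generate_stream_func(), and register_llm_model_chat_adapter() appear in the diff above.

# Minimal sketch of the registry-and-dispatch pattern shown in the diff.
# NOTE: `llm_model_chat_adapters`, `get_llm_chat_adapter`, and the echo-style
# stream function are hypothetical; only the adapter methods and
# `register_llm_model_chat_adapter()` come from the diff above.
from typing import Callable, Iterator, List, Type


class BaseChatAdpter:
    """Base class: adapters claim a model path and supply a stream function."""

    def match(self, model_path: str) -> bool:
        return False

    def get_generate_stream_func(self, model_path: str) -> Callable:
        raise NotImplementedError


llm_model_chat_adapters: List[Type[BaseChatAdpter]] = []


def register_llm_model_chat_adapter(cls: Type[BaseChatAdpter]) -> None:
    """Register an adapter class; first registered match wins."""
    llm_model_chat_adapters.append(cls)


def get_llm_chat_adapter(model_path: str) -> BaseChatAdpter:
    """Pick the first registered adapter whose match() accepts the path."""
    for cls in llm_model_chat_adapters:
        adapter = cls()
        if adapter.match(model_path):
            return adapter
    raise ValueError(f"No chat adapter found for model path: {model_path}")


class ProxyllmChatAdapter(BaseChatAdpter):
    """Matches any model path containing 'proxyllm', as in the removed code."""

    def match(self, model_path: str) -> bool:
        return "proxyllm" in model_path

    def get_generate_stream_func(self, model_path: str) -> Callable:
        def proxyllm_generate_stream(params: dict) -> Iterator[str]:
            # Placeholder: the real function forwards the request to a
            # remote proxy LLM API and yields partial outputs.
            yield f"[proxyllm] echo: {params.get('prompt', '')}"

        return proxyllm_generate_stream


register_llm_model_chat_adapter(ProxyllmChatAdapter)

# Usage: dispatch on the configured model path, then stream a reply.
adapter = get_llm_chat_adapter("proxyllm")
stream = adapter.get_generate_stream_func("proxyllm")
for chunk in stream({"prompt": "hello"}):
    print(chunk)

Commenting out ProxyllmChatAdapter and its registration removes the proxy path from this dispatch chain, consistent with the commit title's proxy LLM refactor; the replacement path is not shown in these hunks.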