fix(model): Fix missing stop tokens bug for some models

This commit is contained in:
FangYin Cheng
2023-10-26 15:20:06 +08:00
parent 5500590e4f
commit 343c9e8c9b
2 changed files with 2 additions and 1 deletion

View File

@@ -164,6 +164,7 @@ class LLMModelAdaper:
# Overwrite model params:
params["stop"] = conv.stop_str
params["stop_token_ids"] = conv.stop_token_ids
return params, model_context

View File

@@ -21,7 +21,7 @@ class ChatFactory(metaclass=Singleton):
implementation = None
for cls in chat_classes:
if cls.chat_scene == chat_mode:
metadata = {"cls": str(cls), "params": kwargs}
metadata = {"cls": str(cls)}
with root_tracer.start_span(
"get_implementation_of_chat", metadata=metadata
):