fix(model): Fix missing stop tokens for some models (#734)

Aries-ckt authored 2023-10-26 15:33:25 +08:00, committed by GitHub
2 changed files with 2 additions and 1 deletion


@@ -164,6 +164,7 @@ class LLMModelAdaper:
         # Overwrite model params:
         params["stop"] = conv.stop_str
+        params["stop_token_ids"] = conv.stop_token_ids
         return params, model_context
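
For context, below is a minimal runnable sketch (not the project's actual API) of how the overwritten stop params are typically consumed by a generation loop. Only conv.stop_str and conv.stop_token_ids come from the diff; the Conversation class, build_generate_params, and should_stop are illustrative assumptions.

# Minimal sketch, assuming a generic generation loop; only conv.stop_str and
# conv.stop_token_ids come from the diff, the rest is illustrative.
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Conversation:
    stop_str: Optional[str] = None  # stop string, e.g. "</s>"
    stop_token_ids: List[int] = field(default_factory=list)  # stop token ids, e.g. [2]


def build_generate_params(conv: Conversation, params: dict) -> dict:
    # Same overwrite as in the hunk above: without stop_token_ids, models whose
    # templates stop on token ids rather than strings would not know when to stop.
    params["stop"] = conv.stop_str
    params["stop_token_ids"] = conv.stop_token_ids
    return params


def should_stop(new_token_id: int, text_so_far: str, params: dict) -> bool:
    # A generation loop typically checks both conditions after each new token.
    if new_token_id in params.get("stop_token_ids", []):
        return True
    stop = params.get("stop")
    return bool(stop) and text_so_far.endswith(stop)


if __name__ == "__main__":
    conv = Conversation(stop_str="</s>", stop_token_ids=[2])
    params = build_generate_params(conv, {"temperature": 0.7})
    print(should_stop(2, "Hello", params))       # True: hit a stop token id
    print(should_stop(42, "Hello</s>", params))  # True: hit the stop string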


@@ -21,7 +21,7 @@ class ChatFactory(metaclass=Singleton):
         implementation = None
         for cls in chat_classes:
             if cls.chat_scene == chat_mode:
-                metadata = {"cls": str(cls), "params": kwargs}
+                metadata = {"cls": str(cls)}
                 with root_tracer.start_span(
                     "get_implementation_of_chat", metadata=metadata
                 ):
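
One plausible reason for dropping kwargs from the span metadata (not stated in the commit) is that request kwargs can carry large or non-serializable objects. The sketch below illustrates that trade-off with a hypothetical tracer that JSON-encodes its metadata; it is not the project's root_tracer, and the encoding behavior is an assumption.

# Hypothetical tracer used only to illustrate the trade-off; it is NOT the
# project's root_tracer, and the JSON-encoding behavior is an assumption.
import json
from contextlib import contextmanager


@contextmanager
def start_span(name: str, metadata: dict):
    # Serializing metadata fails loudly if it contains non-JSON values,
    # which is the kind of object **kwargs can easily carry.
    print(f"span={name} metadata={json.dumps(metadata)}")
    yield


if __name__ == "__main__":
    # Safe: only the class name string, matching the patched code.
    with start_span("get_implementation_of_chat", {"cls": "ChatNormal"}):
        pass

    # Fragile: raw kwargs may contain objects json cannot encode.
    try:
        with start_span(
            "get_implementation_of_chat",
            {"cls": "ChatNormal", "params": {"on_token": lambda t: t}},
        ):
            pass
    except TypeError as err:
        print("metadata not serializable:", err)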