feat(model): Passing stop parameter to proxyllm (#2077)

Author: Fangyin Cheng
Date: 2024-10-18 14:02:53 +08:00
Committed by: GitHub
Parent: cf192a5fb7
Commit: 53ba6259d2

13 changed files with 31 additions and 4 deletions


@@ -21,6 +21,7 @@ def tongyi_generate_stream(
         temperature=params.get("temperature"),
         context=context,
         max_new_tokens=params.get("max_new_tokens"),
+        stop=params.get("stop"),
     )
     for r in client.sync_generate_stream(request):
         yield r
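
With this hunk, tongyi_generate_stream forwards any "stop" value found in the incoming params dict into the ModelRequest it builds. As an illustration only (the caller is not part of this diff, and the "messages" key is assumed from the surrounding function rather than shown above), a params dict carrying stop words might look like this:

# Illustrative params dict for the proxy stream entry point (not part of the diff).
# "temperature", "max_new_tokens", and "stop" mirror the params.get(...) calls above;
# "messages" is assumed from the surrounding function and is not shown in this hunk.
params = {
    "messages": [{"role": "user", "content": "List three colors."}],
    "temperature": 0.7,
    "max_new_tokens": 128,
    "stop": ["\n\n"],  # decoding halts once this sequence is emitted
}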
@@ -96,6 +97,7 @@ class TongyiLLMClient(ProxyLLMClient):
                 top_p=0.8,
                 stream=True,
                 result_format="message",
+                stop=request.stop,
             )
             for r in res:
                 if r:
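
On the client side, request.stop is now forwarded to the underlying DashScope generation call. Below is a minimal standalone sketch of that call shape, assuming the public dashscope SDK and a configured API key; the model name and messages are placeholders, and only top_p, stream, result_format, and stop are taken from the hunk above.

# Minimal sketch, not the repository's code: shows a DashScope Generation call
# with a stop list, assuming the public `dashscope` SDK and DASHSCOPE_API_KEY set.
from dashscope import Generation

res = Generation.call(
    model="qwen-turbo",  # placeholder model name, not taken from the diff
    messages=[{"role": "user", "content": "Count upward from one."}],
    top_p=0.8,
    stream=True,
    result_format="message",
    stop=["five"],  # generation ends when this stop word would be produced
)
for r in res:
    if r:
        print(r)

Reading the stop sequences from request.stop here, rather than from the raw params dict, keeps the client reusable outside the streaming entry point shown in the first hunk.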