Mirror of https://github.com/csunny/DB-GPT.git
feat(model): Passing stop parameter to proxyllm (#2077)
@@ -39,6 +39,7 @@ async def chatgpt_generate_stream(
         temperature=params.get("temperature"),
         context=context,
         max_new_tokens=params.get("max_new_tokens"),
+        stop=params.get("stop"),
     )
     async for r in client.generate_stream(request):
         yield r
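For context, the new stop value is read from the same params dict as temperature and max_new_tokens. A minimal sketch of what a caller's params might now carry (the exact schema is an assumption inferred from the keys read in the hunk above, not taken from the diff):

    # Illustrative only: the params shape is assumed from the keys the diff reads.
    params = {
        "messages": [{"role": "user", "content": "List three colors."}],
        "temperature": 0.7,
        "max_new_tokens": 128,
        # New with this commit: stop sequences are forwarded to the proxy LLM request.
        "stop": ["\n\n"],
    }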
@@ -188,6 +189,8 @@ class OpenAILLMClient(ProxyLLMClient):
             payload["temperature"] = request.temperature
         if request.max_new_tokens:
             payload["max_tokens"] = request.max_new_tokens
+        if request.stop:
+            payload["stop"] = request.stop
         return payload
 
     async def generate(
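Taken together, the two hunks mean a stop value set on the request now reaches the OpenAI-compatible payload (the OpenAI chat completions API accepts stop as a string or a list of strings). Below is a minimal, self-contained sketch of the payload-building logic shown above, using a stand-in Request class rather than DB-GPT's own request type:

    # Sketch only: Request is a stand-in dataclass, not DB-GPT's ModelRequest.
    from dataclasses import dataclass
    from typing import List, Optional, Union


    @dataclass
    class Request:
        temperature: Optional[float] = None
        max_new_tokens: Optional[int] = None
        stop: Optional[Union[str, List[str]]] = None


    def build_payload(request: Request) -> dict:
        payload: dict = {}
        if request.temperature:
            payload["temperature"] = request.temperature
        if request.max_new_tokens:
            # OpenAI-compatible endpoints name this field "max_tokens".
            payload["max_tokens"] = request.max_new_tokens
        if request.stop:
            # New in this commit: stop sequences are included in the payload.
            payload["stop"] = request.stop
        return payload


    print(build_payload(Request(temperature=0.7, max_new_tokens=128, stop=["\n\n"])))
    # {'temperature': 0.7, 'max_tokens': 128, 'stop': ['\n\n']}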