mirror of https://github.com/csunny/DB-GPT.git (synced 2025-07-24 20:47:46 +00:00)
fix: return conv_uid and update wechat

commit ab9d8a370e
parent 0cf08f37b0
Binary file not shown (the updated wechat image): 190 KiB before, 118 KiB after.
@@ -439,9 +439,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
         _type_: streaming responses
     """
     span = root_tracer.start_span("stream_generator")
     msg = "[LLM_ERROR]: llm server has no output, maybe your prompt template is wrong."
-
-    stream_id = f"chatcmpl-{str(uuid.uuid1())}"
     previous_response = ""
     async for chunk in chat.stream_call():
         if chunk:
@@ -453,7 +451,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
                 delta=DeltaMessage(role="assistant", content=incremental_output),
             )
             chunk = ChatCompletionStreamResponse(
-                id=stream_id, choices=[choice_data], model=model_name
+                id=chat.chat_session_id, choices=[choice_data], model=model_name
             )
             yield f"data: {chunk.json(exclude_unset=True, ensure_ascii=False)}\n\n"
         else:
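With this change, every streamed chunk's id field carries the conversation UID (chat.chat_session_id) instead of a throwaway chatcmpl-{uuid1} value, so a caller can recover conv_uid straight from the stream, which is what the commit message promises. A minimal self-contained sketch of the SSE framing this produces; the Chunk dataclass below is a stand-in for DB-GPT's ChatCompletionStreamResponse, and the id/model values are illustrative, not real output:

import json
from dataclasses import asdict, dataclass, field

@dataclass
class Chunk:
    # Stand-in for ChatCompletionStreamResponse; only the fields used here.
    id: str
    model: str
    choices: list = field(default_factory=list)

def sse_frame(chunk: Chunk) -> str:
    # Mirrors the yield in stream_generator: one SSE "data:" event per chunk.
    return f"data: {json.dumps(asdict(chunk), ensure_ascii=False)}\n\n"

# After the fix, the id field is the conversation UID, so a client that keeps
# the first chunk's id already has the conv_uid for follow-up requests.
print(sse_frame(Chunk(id="example-conv-uid", model="chatgpt_proxyllm")))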
@@ -247,7 +247,7 @@ class Client:
         if response.status_code == 200:
             async for line in response.aiter_lines():
                 try:
-                    if line == "data: [DONE]\n":
+                    if line.strip() == "data: [DONE]":
                         break
                     if line.startswith("data:"):
                         json_data = json.loads(line[len("data: ") :])
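The client-side hunk fixes the stream terminator check: response.aiter_lines() typically yields lines with the line ending already split off, so the old exact comparison against "data: [DONE]\n" could never match and the sentinel fell through to json.loads instead of ending the stream. Stripping before comparing is robust to either framing. A standalone sketch of the corrected parse loop; the sample lines are illustrative, not real server output:

import json

def parse_sse_lines(lines):
    """Yield decoded JSON payloads from SSE 'data:' lines, stopping at the
    '[DONE]' sentinel whether or not line endings were preserved."""
    for line in lines:
        if line.strip() == "data: [DONE]":
            break
        if line.startswith("data:"):
            yield json.loads(line[len("data: "):])

# Works with or without trailing newlines on each line.
sample = ['data: {"id": "example-conv-uid"}', "data: [DONE]"]
assert list(parse_sse_lines(sample)) == [{"id": "example-conv-uid"}]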