fix: return conv_uid and update wechat

This commit is contained in:
aries_ckt
2024-03-21 15:59:46 +08:00
parent 0cf08f37b0
commit ab9d8a370e
3 changed files with 2 additions and 4 deletions

View File

@@ -439,9 +439,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
_type_: streaming responses
"""
span = root_tracer.start_span("stream_generator")
msg = "[LLM_ERROR]: llm server has no output, maybe your prompt template is wrong."
stream_id = f"chatcmpl-{str(uuid.uuid1())}"
previous_response = ""
async for chunk in chat.stream_call():
if chunk:
@@ -453,7 +451,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
delta=DeltaMessage(role="assistant", content=incremental_output),
)
chunk = ChatCompletionStreamResponse(
id=stream_id, choices=[choice_data], model=model_name
id=chat.chat_session_id, choices=[choice_data], model=model_name
)
yield f"data: {chunk.json(exclude_unset=True, ensure_ascii=False)}\n\n"
else:

View File

@@ -247,7 +247,7 @@ class Client:
if response.status_code == 200:
async for line in response.aiter_lines():
try:
if line == "data: [DONE]\n":
if line.strip() == "data: [DONE]":
break
if line.startswith("data:"):
json_data = json.loads(line[len("data: ") :])