From 40b55a2d27c7d249e7a5e3fdfae542f332e4ae37 Mon Sep 17 00:00:00 2001
From: csunny
Date: Fri, 9 Jun 2023 16:19:47 +0800
Subject: [PATCH] fix: next-ui output

---
 pilot/model/llm_out/proxy_llm.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/pilot/model/llm_out/proxy_llm.py b/pilot/model/llm_out/proxy_llm.py
index 68512ec3c..ff00620f1 100644
--- a/pilot/model/llm_out/proxy_llm.py
+++ b/pilot/model/llm_out/proxy_llm.py
@@ -76,7 +76,13 @@ def proxyllm_generate_stream(model, tokenizer, params, device, context_len=2048)
     for line in res.iter_lines():
         if line:
             decoded_line = line.decode("utf-8")
-            json_line = json.loads(decoded_line)
-            print(json_line)
-            text += json_line["choices"][0]["message"]["content"]
-            yield text
+            try:
+                json_line = json.loads(decoded_line)
+                print(json_line)
+                text += json_line["choices"][0]["message"]["content"]
+                yield text
+            except Exception as e:
+                text += decoded_line
+                yield json.loads(text)["choices"][0]["message"]["content"]
+
+
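
Note on the change: the old code assumed every line from res.iter_lines() was a
complete JSON payload, so a payload split across chunks crashed json.loads. The
patch wraps the parse in try/except and, on failure, appends the raw line to the
accumulated text and retries the parse on the combined string. Below is a
minimal sketch of that fallback idea, not the repo's code: stream_chunks and the
sample payloads are hypothetical, it assumes the proxy emits OpenAI-style
choices[0].message.content objects, and unlike the patch it keeps the fragment
buffer separate from the already-yielded text.

import json

def stream_chunks(chunks):
    """Sketch of the patch's fallback strategy (hypothetical helper, not
    part of the repo): yield the accumulated assistant text as each
    OpenAI-style chat-completion payload decodes, buffering fragments of
    payloads that arrive split across chunks."""
    text = ""
    buffer = ""
    for decoded_line in chunks:
        buffer += decoded_line
        try:
            json_line = json.loads(buffer)
        except json.JSONDecodeError:
            continue  # payload still incomplete; wait for the next chunk
        buffer = ""  # got a full payload; reset for the next one
        text += json_line["choices"][0]["message"]["content"]
        yield text

# Usage: one complete payload followed by one split across two chunks.
payloads = [
    '{"choices": [{"message": {"content": "Hello"}}]}',
    '{"choices": [{"message": {"content":',
    ' " world"}}]}',
]
for partial in stream_chunks(payloads):
    print(partial)  # prints "Hello", then "Hello world"

Design note: buffering separately also sidesteps an edge case in the patch
itself: if text already holds yielded content when a fragment arrives, the
except branch's json.loads(text) would raise again.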