fix: proxy llm raises an exception when the proxy llm response has an error (#303)

Close #302
magic.chen 2023-07-07 17:27:13 +08:00 committed by GitHub
commit 3dbbc854ec


@@ -77,11 +77,15 @@ def proxyllm_generate_stream(model, tokenizer, params, device, context_len=2048)
     text = ""
     for line in res.iter_lines():
         if line:
-            json_data = line.split(b": ", 1)[1]
-            decoded_line = json_data.decode("utf-8")
-            if decoded_line.lower() != "[DONE]".lower():
-                obj = json.loads(json_data)
-                if obj["choices"][0]["delta"].get("content") is not None:
-                    content = obj["choices"][0]["delta"]["content"]
-                    text += content
-            yield text
+            if not line.startswith(b"data: "):
+                error_message = line.decode("utf-8")
+                yield error_message
+            else:
+                json_data = line.split(b": ", 1)[1]
+                decoded_line = json_data.decode("utf-8")
+                if decoded_line.lower() != "[DONE]".lower():
+                    obj = json.loads(json_data)
+                    if obj["choices"][0]["delta"].get("content") is not None:
+                        content = obj["choices"][0]["delta"]["content"]
+                        text += content
+                yield text
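
For reference, here is a minimal, self-contained sketch of the patched streaming loop, usable outside the repository. The endpoint URL, headers, payload, and the use of `requests` are assumptions for illustration only; the line-handling logic is what mirrors the diff above.

import json

import requests


def proxyllm_stream_sketch(url, headers, payload):
    """Yield partial completion text from an OpenAI-style SSE stream.

    Hypothetical standalone version of the patched loop: url, headers,
    and payload are illustrative stand-ins, not the project's real API.
    """
    res = requests.post(url, headers=headers, json=payload, stream=True)
    text = ""
    for line in res.iter_lines():
        if not line:
            continue
        if not line.startswith(b"data: "):
            # The proxy returned an error body rather than an SSE event;
            # yield it to the caller instead of letting the split/parse
            # below raise and kill the stream.
            yield line.decode("utf-8")
        else:
            json_data = line.split(b": ", 1)[1]
            decoded_line = json_data.decode("utf-8")
            if decoded_line.lower() != "[DONE]".lower():
                obj = json.loads(json_data)
                delta = obj["choices"][0]["delta"]
                if delta.get("content") is not None:
                    text += delta["content"]
            yield text

Before this change, every line was split on b": " and parsed as JSON, so an error body with no "data: " prefix escaped as an uncaught exception (an IndexError or JSONDecodeError, depending on the body); with the fix it is yielded once as plain text and the generator keeps running.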