Proxy LLM: support streamed response format (#190)

This commit is contained in:
Aries-ckt 2023-06-12 22:11:45 +08:00 committed by GitHub
commit 17b190b691

View File

@ -76,7 +76,13 @@ def proxyllm_generate_stream(model, tokenizer, params, device, context_len=2048)
for line in res.iter_lines():
if line:
decoded_line = line.decode("utf-8")
json_line = json.loads(decoded_line)
print(json_line)
text += json_line["choices"][0]["message"]["content"]
yield text
try:
json_line = json.loads(decoded_line)
print(json_line)
text += json_line["choices"][0]["message"]["content"]
yield text
except Exception as e:
text += decoded_line
yield json.loads(text)["choices"][0]["message"]["content"]