From acc2c5806c01ca8626bd3ec636f6d097b14ed730 Mon Sep 17 00:00:00 2001 From: xuyuan23 <643854343@qq.com> Date: Tue, 13 Jun 2023 17:45:49 +0800 Subject: [PATCH] remove commented-out code --- pilot/model/llm_out/proxy_llm.py | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/pilot/model/llm_out/proxy_llm.py b/pilot/model/llm_out/proxy_llm.py index e0ec78dd9..717311778 100644 --- a/pilot/model/llm_out/proxy_llm.py +++ b/pilot/model/llm_out/proxy_llm.py @@ -84,24 +84,3 @@ def proxyllm_generate_stream(model, tokenizer, params, device, context_len=2048) content = obj["choices"][0]["delta"]["content"] text += content yield text - - # native result. - # payloads = { - # "model": "gpt-3.5-turbo", # just for test, remove this later - # "messages": history, - # "temperature": params.get("temperature"), - # "max_tokens": params.get("max_new_tokens"), - # } - # - # res = requests.post( - # CFG.proxy_server_url, headers=headers, json=payloads, stream=True - # ) - # - # text = "" - # line = res.content - # if line: - # decoded_line = line.decode("utf-8") - # json_line = json.loads(decoded_line) - # print(json_line) - # text += json_line["choices"][0]["message"]["content"] - # yield text