Mirror of https://github.com/hwchase17/langchain.git (synced 2025-06-24 15:43:54 +00:00)
- Description: Small change to fix broken Azure streaming. A more complete migration will probably still be necessary once the new API behavior is finalized.
- Issue: Implements the fix by @rock-you in #6462
- Dependencies: N/A

There don't seem to be any tests specifically for this, and I had some trouble adding them. This is just a small temporary fix that allows for the new API changes OpenAI is releasing without breaking any other code.

---------

Co-authored-by: Jacob Swe <jswe@polencapital.com>
Co-authored-by: Bagatur <baskaryan@gmail.com>
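Concretely, Azure's streaming endpoint can emit a leading chunk whose `choices` list is empty (for example, a content-filter annotation chunk), so the unguarded `stream_resp["choices"][0]` lookup raised `IndexError` before any tokens arrived. A minimal sketch of the failure mode and the guard, using made-up chunk shapes rather than captured Azure payloads:

```python
# Illustrative chunks only: the first mimics an Azure-style chunk with an
# empty "choices" list, which used to crash the streaming loop.
chunks = [
    {"choices": []},
    {"choices": [{"delta": {"role": "assistant", "content": "Hello"}}]},
    {"choices": [{"delta": {"content": " world"}}]},
]

inner_completion = ""
role = "assistant"
for stream_resp in chunks:
    if len(stream_resp["choices"]) > 0:  # the guard this commit adds
        role = stream_resp["choices"][0]["delta"].get("role", role)
        token = stream_resp["choices"][0]["delta"].get("content") or ""
        inner_completion += token

print(inner_completion)  # "Hello world"
```

Without the `len(...) > 0` check, the first chunk raises `IndexError`; with it, empty-choices chunks are simply skipped.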
parent c1ea8da9bc
commit 0af48b06d0
@@ -330,17 +330,20 @@ class ChatOpenAI(BaseChatModel):
             for stream_resp in self.completion_with_retry(
                 messages=message_dicts, **params
             ):
-                role = stream_resp["choices"][0]["delta"].get("role", role)
-                token = stream_resp["choices"][0]["delta"].get("content") or ""
-                inner_completion += token
-                _function_call = stream_resp["choices"][0]["delta"].get("function_call")
-                if _function_call:
-                    if function_call is None:
-                        function_call = _function_call
-                    else:
-                        function_call["arguments"] += _function_call["arguments"]
-                if run_manager:
-                    run_manager.on_llm_new_token(token)
+                if len(stream_resp["choices"]) > 0:
+                    role = stream_resp["choices"][0]["delta"].get("role", role)
+                    token = stream_resp["choices"][0]["delta"].get("content") or ""
+                    inner_completion += token
+                    _function_call = stream_resp["choices"][0]["delta"].get(
+                        "function_call"
+                    )
+                    if _function_call:
+                        if function_call is None:
+                            function_call = _function_call
+                        else:
+                            function_call["arguments"] += _function_call["arguments"]
+                    if run_manager:
+                        run_manager.on_llm_new_token(token)
             message = _convert_dict_to_message(
                 {
                     "content": inner_completion,
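Beyond the guard, the loop above also stitches streamed `function_call` fragments back together: the first fragment seeds `function_call`, and each later fragment's `arguments` string is concatenated onto it. A toy illustration of that accumulation, with invented delta payloads (`get_weather` and its arguments are hypothetical):

```python
# Invented streamed deltas: the function name arrives first, then the JSON
# arguments string is split across chunks.
deltas = [
    {"function_call": {"name": "get_weather", "arguments": '{"city": '}},
    {"function_call": {"arguments": '"Paris"}'}},
]

function_call = None
for delta in deltas:
    _function_call = delta.get("function_call")
    if _function_call:
        if function_call is None:
            function_call = _function_call  # first fragment seeds the dict
        else:
            function_call["arguments"] += _function_call["arguments"]

print(function_call)
# {'name': 'get_weather', 'arguments': '{"city": "Paris"}'}
```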
@@ -393,17 +396,20 @@ class ChatOpenAI(BaseChatModel):
             async for stream_resp in await acompletion_with_retry(
                 self, messages=message_dicts, **params
             ):
-                role = stream_resp["choices"][0]["delta"].get("role", role)
-                token = stream_resp["choices"][0]["delta"].get("content", "")
-                inner_completion += token or ""
-                _function_call = stream_resp["choices"][0]["delta"].get("function_call")
-                if _function_call:
-                    if function_call is None:
-                        function_call = _function_call
-                    else:
-                        function_call["arguments"] += _function_call["arguments"]
-                if run_manager:
-                    await run_manager.on_llm_new_token(token)
+                if len(stream_resp["choices"]) > 0:
+                    role = stream_resp["choices"][0]["delta"].get("role", role)
+                    token = stream_resp["choices"][0]["delta"].get("content", "")
+                    inner_completion += token or ""
+                    _function_call = stream_resp["choices"][0]["delta"].get(
+                        "function_call"
+                    )
+                    if _function_call:
+                        if function_call is None:
+                            function_call = _function_call
+                        else:
+                            function_call["arguments"] += _function_call["arguments"]
+                    if run_manager:
+                        await run_manager.on_llm_new_token(token)
             message = _convert_dict_to_message(
                 {
                     "content": inner_completion,
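The async path gets the identical guard. A runnable toy sketch of the guarded `async for` loop, with a hand-written `fake_stream` generator standing in for `acompletion_with_retry`:

```python
import asyncio

async def fake_stream():
    # Mimics an Azure stream: an empty-choices chunk, then token chunks.
    yield {"choices": []}
    yield {"choices": [{"delta": {"content": "Hi"}}]}
    yield {"choices": [{"delta": {"content": "!"}}]}

async def main() -> str:
    inner_completion = ""
    async for stream_resp in fake_stream():
        if len(stream_resp["choices"]) > 0:  # same guard as the sync path
            token = stream_resp["choices"][0]["delta"].get("content", "")
            inner_completion += token or ""
    return inner_completion

print(asyncio.run(main()))  # "Hi!"
```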
|