openai[patch]: fix key collision and _astream (#24345)

Fixes small issues introduced in
https://github.com/langchain-ai/langchain/pull/24150 (unreleased).
ccurme 2024-07-17 12:59:26 -04:00 committed by GitHub
parent bcb5f354ad
commit 4cf67084d3

@@ -631,8 +631,11 @@ class BaseChatOpenAI(BaseChatModel):
                     "output_tokens": token_usage.get("completion_tokens", 0),
                     "total_tokens": token_usage.get("total_tokens", 0),
                 }
-            generation_info = dict(
-                finish_reason=res.get("finish_reason"), **(generation_info or {})
+            generation_info = generation_info or {}
+            generation_info["finish_reason"] = (
+                res.get("finish_reason")
+                if res.get("finish_reason") is not None
+                else generation_info.get("finish_reason")
             )
             if "logprobs" in res:
                 generation_info["logprobs"] = res["logprobs"]
@@ -660,7 +663,7 @@ class BaseChatOpenAI(BaseChatModel):
             response = raw_response.parse()
             base_generation_info = {"headers": dict(raw_response.headers)}
         else:
-            response = self.async_client.create(**payload)
+            response = await self.async_client.create(**payload)
             base_generation_info = {}
         async with response:
             is_first_chunk = True
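
The `_astream` fix adds the missing `await`: the async client's `create(...)` is a coroutine, so without `await` the coroutine object itself is bound to `response`, and the subsequent `async with response:` fails because a coroutine is not an async context manager. A rough sketch of the failure mode, using a stand-in coroutine rather than the real OpenAI client:

```python
import asyncio


class FakeStreamResponse:
    # Stand-in for the async streaming response object returned by the real client.
    async def __aenter__(self):
        return self

    async def __aexit__(self, *exc):
        return False


async def create(**payload):
    # Stand-in for an async create(...) call in streaming mode.
    return FakeStreamResponse()


async def broken():
    response = create(model="gpt-4o-mini")  # missing await: `response` is a coroutine
    async with response:  # TypeError: coroutine does not support async context manager protocol
        pass


async def fixed():
    response = await create(model="gpt-4o-mini")  # awaited: the actual response object
    async with response:
        print("streaming...")


asyncio.run(fixed())
```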