Mirror of https://github.com/hwchase17/langchain.git, synced 2025-06-21 14:18:52 +00:00
community: Add logprobs in gen output (#14826)
Now that it's supported again for OpenAI chat models. Shame this doesn't include it in the `.invoke()` output, though (it's not included in the message itself); a follow-up would be needed for that to be the case.
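In the meantime, the logprobs can be read off the `generation_info` of each `ChatGeneration` returned by `generate()`. A minimal sketch, assuming the OpenAI `logprobs` flag can be forwarded via `model_kwargs` and that the import paths and model name match your installed version (both are placeholders, not part of this commit):

# Hedged sketch, not part of this commit: reading the logprobs that this
# change stores in generation_info. Forwarding the flag via model_kwargs and
# the import paths are assumptions about the installed version.
from langchain_community.chat_models import ChatOpenAI
from langchain_core.messages import HumanMessage

chat = ChatOpenAI(model="gpt-3.5-turbo", model_kwargs={"logprobs": True})

# generate() returns an LLMResult; each ChatGeneration carries generation_info,
# which after this commit also holds the raw "logprobs" block of the choice.
result = chat.generate([[HumanMessage(content="Say hi")]])
gen = result.generations[0][0]
print(gen.generation_info.get("finish_reason"))
print(gen.generation_info.get("logprobs"))  # None if the API omitted logprobs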
Parent: c316731d0f
Commit: 2d91d2b978
@@ -454,9 +454,12 @@ class ChatOpenAI(BaseChatModel):
             response = response.dict()
         for res in response["choices"]:
             message = convert_dict_to_message(res["message"])
+            generation_info = dict(finish_reason=res.get("finish_reason"))
+            if "logprobs" in res:
+                generation_info["logprobs"] = res["logprobs"]
             gen = ChatGeneration(
                 message=message,
-                generation_info=dict(finish_reason=res.get("finish_reason")),
+                generation_info=generation_info,
             )
             generations.append(gen)
         token_usage = response.get("usage", {})
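As a rough follow-up sketch of what ends up under `generation_info["logprobs"]`: the field names below (`content`, `token`, `logprob`) assume the OpenAI Chat Completions logprobs shape and are not defined by this commit; the values are illustrative only.

# Hedged sketch: flattening the payload the loop above stores under
# generation_info["logprobs"]. The "content"/"token"/"logprob" keys assume the
# OpenAI Chat Completions logprobs shape; adjust if your responses differ.
from typing import Any, Dict, List, Tuple

def token_logprobs(generation_info: Dict[str, Any]) -> List[Tuple[str, float]]:
    """Return (token, logprob) pairs from a ChatGeneration's generation_info."""
    logprobs = generation_info.get("logprobs") or {}
    return [(item["token"], item["logprob"]) for item in logprobs.get("content", [])]

# Illustrative values only, shaped like a chat completions choice:
info = {
    "finish_reason": "stop",
    "logprobs": {"content": [{"token": "Hi", "logprob": -0.01}]},
}
print(token_logprobs(info))  # [('Hi', -0.01)]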