Mirror of https://github.com/hwchase17/langchain.git, synced 2025-08-09 13:00:34 +00:00
parent 2715bed70e
commit e8e5d67a8d
@@ -2161,16 +2161,18 @@ def _create_usage_metadata(oai_token_usage: dict) -> UsageMetadata:
     output_tokens = oai_token_usage.get("completion_tokens", 0)
     total_tokens = oai_token_usage.get("total_tokens", input_tokens + output_tokens)
     input_token_details: dict = {
-        "audio": oai_token_usage.get("prompt_tokens_details", {}).get("audio_tokens"),
-        "cache_read": oai_token_usage.get("prompt_tokens_details", {}).get(
+        "audio": (oai_token_usage.get("prompt_tokens_details") or {}).get(
+            "audio_tokens"
+        ),
+        "cache_read": (oai_token_usage.get("prompt_tokens_details") or {}).get(
             "cached_tokens"
         ),
     }
     output_token_details: dict = {
-        "audio": oai_token_usage.get("completion_tokens_details", {}).get(
+        "audio": (oai_token_usage.get("completion_tokens_details") or {}).get(
             "audio_tokens"
         ),
-        "reasoning": oai_token_usage.get("completion_tokens_details", {}).get(
+        "reasoning": (oai_token_usage.get("completion_tokens_details") or {}).get(
             "reasoning_tokens"
         ),
     }
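The hunk above swaps dict.get(key, {}) for (dict.get(key) or {}). The default argument of dict.get applies only when the key is missing entirely; if an OpenAI-compatible response includes prompt_tokens_details or completion_tokens_details with an explicit None value, the old code chained .get() on None and raised AttributeError. A minimal sketch of the difference, with an illustrative payload that is not taken from the diff:

# Illustrative only: a usage payload where the details key exists but is None.
token_usage = {"prompt_tokens": 11, "prompt_tokens_details": None}

# Old pattern: the {} default is used only when the key is absent,
# so this still yields None and a chained .get() would raise AttributeError.
details = token_usage.get("prompt_tokens_details", {})
print(details)  # None

# New pattern: `or {}` also replaces an explicit None with an empty dict.
details = token_usage.get("prompt_tokens_details") or {}
print(details.get("audio_tokens"))  # prints None instead of raising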
@@ -23,6 +23,7 @@ from langchain_openai import ChatOpenAI
 from langchain_openai.chat_models.base import (
     _convert_dict_to_message,
     _convert_message_to_dict,
+    _create_usage_metadata,
     _format_message_content,
 )

@@ -730,3 +731,21 @@ def test_schema_from_with_structured_output(schema: Type) -> None:
     }
     actual = structured_llm.get_output_schema().model_json_schema()
     assert actual == expected
+
+
+def test__create_usage_metadata() -> None:
+    usage_metadata = {
+        "completion_tokens": 15,
+        "prompt_tokens_details": None,
+        "completion_tokens_details": None,
+        "prompt_tokens": 11,
+        "total_tokens": 26,
+    }
+    result = _create_usage_metadata(usage_metadata)
+    assert result == UsageMetadata(
+        output_tokens=15,
+        input_tokens=11,
+        total_tokens=26,
+        input_token_details={},
+        output_token_details={},
+    )
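The test expects empty input_token_details and output_token_details because, with both details payloads set to None, every sub-field resolves to None. The step that drops those None sub-values lives later in _create_usage_metadata and is not shown in this diff, so the sketch below only reproduces the idea under that assumption:

# Rough, self-contained sketch of the asserted behavior; the None-filtering
# step is assumed from context, not shown in this hunk.
usage = {"prompt_tokens": 11, "completion_tokens": 15, "total_tokens": 26,
         "prompt_tokens_details": None, "completion_tokens_details": None}

input_token_details = {
    "audio": (usage.get("prompt_tokens_details") or {}).get("audio_tokens"),
    "cache_read": (usage.get("prompt_tokens_details") or {}).get("cached_tokens"),
}
# Sub-values that are None get filtered out, leaving an empty dict,
# which matches input_token_details={} in the test above.
print({k: v for k, v in input_token_details.items() if v is not None})  # {}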