From 0185010b882de38bc16c0c5000646689731f2ef1 Mon Sep 17 00:00:00 2001
From: ccurme
Date: Fri, 3 Jan 2025 10:14:07 -0500
Subject: [PATCH] community[patch]: additional check for prompt caching support
 (#29008)

Prompt caching explicitly excludes `gpt-4o-2024-05-13`:
https://platform.openai.com/docs/guides/prompt-caching

Resolves https://github.com/langchain-ai/langchain/issues/28997
---
 libs/community/langchain_community/callbacks/openai_info.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/libs/community/langchain_community/callbacks/openai_info.py b/libs/community/langchain_community/callbacks/openai_info.py
index 12f42a566ee..5796065f1ee 100644
--- a/libs/community/langchain_community/callbacks/openai_info.py
+++ b/libs/community/langchain_community/callbacks/openai_info.py
@@ -204,8 +204,10 @@ def standardize_model_name(
         or ("finetuned" in model_name and "legacy" not in model_name)
     ):
         return model_name + "-completion"
-    if token_type == TokenType.PROMPT_CACHED and (
-        model_name.startswith("gpt-4o") or model_name.startswith("o1")
+    if (
+        token_type == TokenType.PROMPT_CACHED
+        and (model_name.startswith("gpt-4o") or model_name.startswith("o1"))
+        and not (model_name.startswith("gpt-4o-2024-05-13"))
     ):
         return model_name + "-cached"
     else: