community[patch]: additional check for prompt caching support (#29008)

Prompt caching explicitly excludes `gpt-4o-2024-05-13`:
https://platform.openai.com/docs/guides/prompt-caching

Resolves https://github.com/langchain-ai/langchain/issues/28997
This commit is contained in:
ccurme 2025-01-03 10:14:07 -05:00 committed by GitHub
parent 4de52e7891
commit 0185010b88
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -204,8 +204,10 @@ def standardize_model_name(
        or ("finetuned" in model_name and "legacy" not in model_name)
    ):
        return model_name + "-completion"
-    if token_type == TokenType.PROMPT_CACHED and (
-        model_name.startswith("gpt-4o") or model_name.startswith("o1")
+    if (
+        token_type == TokenType.PROMPT_CACHED
+        and (model_name.startswith("gpt-4o") or model_name.startswith("o1"))
+        and not (model_name.startswith("gpt-4o-2024-05-13"))
    ):
        return model_name + "-cached"
    else: