Mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-25 04:49:17 +00:00)
standard-tests[patch]: fix oai usage metadata test (#27122)
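Summary (reconstructed from the diff below): the commit changes `supported_usage_metadata_details` on the OpenAI standard tests from a flat `List` of detail names to a `Dict` keyed by invocation mode (`"invoke"` and `"stream"`), so usage-metadata support can be declared per mode, and it removes the `test_chat_openai_extra_kwargs` integration test from the OpenAI integration test file.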
@@ -238,30 +238,6 @@ async def test_async_chat_openai_bind_functions() -> None:
     assert isinstance(generation, AIMessage)
 
 
-def test_chat_openai_extra_kwargs() -> None:
-    """Test extra kwargs to chat openai."""
-    # Check that foo is saved in extra_kwargs.
-    llm = ChatOpenAI(foo=3, max_tokens=10)  # type: ignore[call-arg]
-    assert llm.max_tokens == 10
-    assert llm.model_kwargs == {"foo": 3}
-
-    # Test that if extra_kwargs are provided, they are added to it.
-    llm = ChatOpenAI(foo=3, model_kwargs={"bar": 2})  # type: ignore[call-arg]
-    assert llm.model_kwargs == {"foo": 3, "bar": 2}
-
-    # Test that if provided twice it errors
-    with pytest.raises(ValueError):
-        ChatOpenAI(foo=3, model_kwargs={"foo": 2})  # type: ignore[call-arg]
-
-    # Test that if explicit param is specified in kwargs it errors
-    with pytest.raises(ValueError):
-        ChatOpenAI(model_kwargs={"temperature": 0.2})
-
-    # Test that "model" cannot be specified in kwargs
-    with pytest.raises(ValueError):
-        ChatOpenAI(model_kwargs={"model": "gpt-3.5-turbo-instruct"})
-
-
 @pytest.mark.scheduled
 def test_openai_streaming() -> None:
     """Test streaming tokens from OpenAI."""
@@ -1,7 +1,7 @@
 """Standard LangChain interface tests"""
 
 from pathlib import Path
-from typing import List, Literal, Type, cast
+from typing import Dict, List, Literal, Type, cast
 
 from langchain_core.language_models import BaseChatModel
 from langchain_core.messages import AIMessage
@@ -28,16 +28,19 @@ class TestOpenAIStandard(ChatModelIntegrationTests):
     @property
     def supported_usage_metadata_details(
         self,
-    ) -> List[
-        Literal[
-            "audio_input",
-            "audio_output",
-            "reasoning_output",
-            "cache_read_input",
-            "cache_creation_input",
-        ]
+    ) -> Dict[
+        Literal["invoke", "stream"],
+        List[
+            Literal[
+                "audio_input",
+                "audio_output",
+                "reasoning_output",
+                "cache_read_input",
+                "cache_creation_input",
+            ]
+        ],
     ]:
-        return ["reasoning_output", "cache_read_input"]
+        return {"invoke": ["reasoning_output", "cache_read_input"], "stream": []}
 
     def invoke_with_cache_read_input(self, *, stream: bool = False) -> AIMessage:
         with open(REPO_ROOT_DIR / "README.md", "r") as f:
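The reshaped property lets the standard tests gate usage-metadata checks per invocation mode: here OpenAI declares `reasoning_output` and `cache_read_input` for `invoke` and nothing yet for `stream`. A hedged sketch of how a harness might consume the dict; the helper name and gating logic are assumptions, and only `supported_usage_metadata_details` and `invoke_with_cache_read_input` come from the diff.

from langchain_core.messages import AIMessage

def check_cache_read(suite: "TestOpenAIStandard", mode: str) -> None:
    # Hypothetical helper: run the cache-read assertion only when the
    # suite declares "cache_read_input" support for the given mode
    # ("invoke" or "stream").
    if "cache_read_input" not in suite.supported_usage_metadata_details[mode]:
        return
    msg: AIMessage = suite.invoke_with_cache_read_input(stream=(mode == "stream"))
    assert msg.usage_metadata is not None
    # langchain-core's UsageMetadata nests cache reads under input_token_details.
    assert msg.usage_metadata["input_token_details"]["cache_read"] >= 0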