Compare commits

...

3 Commits

Author SHA1 Message Date
Mason Daugherty
8f1f9e4879 Merge branch 'master' into mdrxy/fix-oai-stream 2025-09-08 16:20:22 -04:00
Mason Daugherty
d55d912a22 switch order 2025-08-26 23:52:59 -04:00
Mason Daugherty
6432647743 fix(core): _should_stream respects streaming param 2025-08-26 22:16:07 -04:00
3 changed files with 11 additions and 7 deletions

View File

@@ -457,6 +457,10 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
if "stream" in kwargs:
return kwargs["stream"]
+ # Check if streaming has been disabled via the streaming parameter.
+ if not getattr(self, "streaming", True):
+     return False
# Check if any streaming callback handlers have been passed in.
handlers = run_manager.handlers if run_manager else []
return any(isinstance(h, _StreamingCallbackHandler) for h in handlers)

View File

@@ -350,7 +350,7 @@ def mock_glm4_completion() -> list:
async def test_glm4_astream(mock_glm4_completion: list) -> None:
llm_name = "glm-4"
- llm = ChatOpenAI(model=llm_name, stream_usage=True)
+ llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
mock_client = AsyncMock()
async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
@@ -375,7 +375,7 @@ async def test_glm4_astream(mock_glm4_completion: list) -> None:
def test_glm4_stream(mock_glm4_completion: list) -> None:
llm_name = "glm-4"
- llm = ChatOpenAI(model=llm_name, stream_usage=True)
+ llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
mock_client = MagicMock()
def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager:
@@ -431,7 +431,7 @@ def mock_deepseek_completion() -> list[dict]:
async def test_deepseek_astream(mock_deepseek_completion: list) -> None:
llm_name = "deepseek-chat"
- llm = ChatOpenAI(model=llm_name, stream_usage=True)
+ llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
mock_client = AsyncMock()
async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
@@ -455,7 +455,7 @@ async def test_deepseek_astream(mock_deepseek_completion: list) -> None:
def test_deepseek_stream(mock_deepseek_completion: list) -> None:
llm_name = "deepseek-chat"
- llm = ChatOpenAI(model=llm_name, stream_usage=True)
+ llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
mock_client = MagicMock()
def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager:
@@ -499,7 +499,7 @@ def mock_openai_completion() -> list[dict]:
async def test_openai_astream(mock_openai_completion: list) -> None:
llm_name = "gpt-4o"
- llm = ChatOpenAI(model=llm_name, stream_usage=True)
+ llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
mock_client = AsyncMock()
async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
@@ -523,7 +523,7 @@ async def test_openai_astream(mock_openai_completion: list) -> None:
def test_openai_stream(mock_openai_completion: list) -> None:
llm_name = "gpt-4o"
- llm = ChatOpenAI(model=llm_name, stream_usage=True)
+ llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
mock_client = MagicMock()
def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager:

View File

@@ -621,7 +621,7 @@ def _strip_none(obj: Any) -> Any:
def test_responses_stream() -> None:
- llm = ChatOpenAI(model="o4-mini", output_version="responses/v1")
+ llm = ChatOpenAI(model="o4-mini", output_version="responses/v1", streaming=True)
mock_client = MagicMock()
def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager: