fix(ollama): raise error when sync client is not initialized

When self._client is None, _create_chat_stream silently produces an
empty generator. The error only surfaces later as a misleading
"No data received from Ollama stream" message, making it difficult
to diagnose the root cause.

Now raises RuntimeError immediately with a clear message when the sync
client is not initialized.
This commit is contained in:
Yi LIU
2026-02-12 21:54:08 +08:00
parent a50d86c353
commit bc804cdcd3
2 changed files with 20 additions and 3 deletions

View File

@@ -948,12 +948,17 @@ class ChatOllama(BaseChatModel):
stop: list[str] | None = None,
**kwargs: Any,
) -> Iterator[Mapping[str, Any] | str]:
if not self._client:
msg = (
"Ollama sync client is not initialized. "
"Make sure the model was properly constructed."
)
raise RuntimeError(msg)
chat_params = self._chat_params(messages, stop, **kwargs)
if chat_params["stream"]:
if self._client:
yield from self._client.chat(**chat_params)
elif self._client:
yield from self._client.chat(**chat_params)
else:
yield self._client.chat(**chat_params)
def _chat_stream_with_aggregation(

View File

@@ -449,6 +449,18 @@ def test_reasoning_param_passed_to_client() -> None:
assert call_kwargs["think"] is True
def test_create_chat_stream_raises_when_client_none() -> None:
    """Verify ``_create_chat_stream`` fails fast when the sync client is ``None``."""
    with patch("langchain_ollama.chat_models.Client") as patched_client_cls:
        patched_client_cls.return_value = MagicMock()
        chat_model = ChatOllama(model="test-model")
        # Wipe the sync client to mimic a model whose client never initialized.
        chat_model._client = None
        with pytest.raises(RuntimeError, match="sync client is not initialized"):
            list(chat_model._create_chat_stream([HumanMessage("Hello")]))
def test_chat_ollama_ignores_strict_arg() -> None:
"""Test that ChatOllama ignores the 'strict' argument."""
response = [