From af3b88f58d2bc3e203fc1f93134a6f6ab33b9cd7 Mon Sep 17 00:00:00 2001 From: Mason Daugherty Date: Fri, 22 Aug 2025 15:11:32 -0400 Subject: [PATCH] feat(ollama): update reasoning type to support string values for custom intensity levels (e.g. `gpt-oss`) (#32650) --- libs/partners/ollama/langchain_ollama/chat_models.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/libs/partners/ollama/langchain_ollama/chat_models.py b/libs/partners/ollama/langchain_ollama/chat_models.py index 80c2ef3467a..1c44db0bd1e 100644 --- a/libs/partners/ollama/langchain_ollama/chat_models.py +++ b/libs/partners/ollama/langchain_ollama/chat_models.py @@ -418,7 +418,7 @@ class ChatOllama(BaseChatModel): model: str """Model name to use.""" - reasoning: Optional[bool] = None + reasoning: Optional[Union[bool, str]] = None """Controls the reasoning/thinking mode for `supported models <https://ollama.com/search?c=thinking>`__. @@ -431,7 +431,13 @@ - ``None`` (Default): The model will use its default reasoning behavior. Note however, if the model's default behavior *is* to perform reasoning, think tags (``<think>`` and ``</think>``) will be present within the main response content - unless you set ``reasoning`` to ``True``.""" + unless you set ``reasoning`` to ``True``. + - ``str``: e.g. ``'low'``, ``'medium'``, ``'high'``. Enables reasoning with a custom + intensity level. Currently, this is only supported by ``gpt-oss``. See the + `Ollama docs `__ + for more information. + + """ validate_model_on_init: bool = False """Whether to validate the model exists in Ollama locally on initialization.