diff --git a/libs/partners/ollama/langchain_ollama/chat_models.py b/libs/partners/ollama/langchain_ollama/chat_models.py index 80c2ef3467a..1c44db0bd1e 100644 --- a/libs/partners/ollama/langchain_ollama/chat_models.py +++ b/libs/partners/ollama/langchain_ollama/chat_models.py @@ -418,7 +418,7 @@ class ChatOllama(BaseChatModel): model: str """Model name to use.""" - reasoning: Optional[bool] = None + reasoning: Optional[Union[bool, str]] = None """Controls the reasoning/thinking mode for `supported models <https://ollama.com/search?c=thinking>`__. @@ -431,7 +431,13 @@ class ChatOllama(BaseChatModel): - ``None`` (Default): The model will use its default reasoning behavior. Note however, if the model's default behavior *is* to perform reasoning, think tags (``<think>`` and ``</think>``) will be present within the main response content - unless you set ``reasoning`` to ``True``.""" + unless you set ``reasoning`` to ``True``. + - ``str``: e.g. ``'low'``, ``'medium'``, ``'high'``. Enables reasoning with a custom + intensity level. Currently, this is only supported by ``gpt-oss``. See the + `Ollama docs <https://docs.ollama.com/capabilities/thinking>`__ + for more information. + + """ validate_model_on_init: bool = False """Whether to validate the model exists in Ollama locally on initialization.