diff --git a/libs/partners/ollama/langchain_ollama/__init__.py b/libs/partners/ollama/langchain_ollama/__init__.py index 51860755d7b..4d9864fc6a2 100644 --- a/libs/partners/ollama/langchain_ollama/__init__.py +++ b/libs/partners/ollama/langchain_ollama/__init__.py @@ -1,6 +1,15 @@ """This is the langchain_ollama package. -It provides infrastructure for interacting with the Ollama service. +Provides infrastructure for interacting with the `Ollama <https://ollama.com/>`__ +service. + +.. note:: + **Newly added in 0.3.4:** ``validate_model_on_init`` param on all models. + This parameter allows you to validate the model exists in Ollama locally on + initialization. If set to ``True``, it will raise an error if the model does not + exist locally. This is useful for ensuring that the model is available before + attempting to use it, especially in environments where models may not be + pre-downloaded. """ from importlib import metadata diff --git a/libs/partners/ollama/langchain_ollama/chat_models.py b/libs/partners/ollama/langchain_ollama/chat_models.py index 4fbe9bce062..91b2e1aa404 100644 --- a/libs/partners/ollama/langchain_ollama/chat_models.py +++ b/libs/partners/ollama/langchain_ollama/chat_models.py @@ -437,7 +437,10 @@ class ChatOllama(BaseChatModel): unless you set ``reasoning`` to ``True``.""" validate_model_on_init: bool = False - """Whether to validate the model exists in Ollama locally on initialization.""" + """Whether to validate the model exists in Ollama locally on initialization. + + .. versionadded:: 0.3.4 + """ mirostat: Optional[int] = None """Enable Mirostat sampling for controlling perplexity. 
diff --git a/libs/partners/ollama/langchain_ollama/embeddings.py b/libs/partners/ollama/langchain_ollama/embeddings.py index fedf80b5ba5..c95e2f39b43 100644 --- a/libs/partners/ollama/langchain_ollama/embeddings.py +++ b/libs/partners/ollama/langchain_ollama/embeddings.py @@ -128,7 +128,10 @@ class OllamaEmbeddings(BaseModel, Embeddings): """Model name to use.""" validate_model_on_init: bool = False - """Whether to validate the model exists in ollama locally on initialization.""" + """Whether to validate the model exists in Ollama locally on initialization. + + .. versionadded:: 0.3.4 + """ base_url: Optional[str] = None """Base url the model is hosted under.""" diff --git a/libs/partners/ollama/langchain_ollama/llms.py b/libs/partners/ollama/langchain_ollama/llms.py index 89d5040b2f9..f7978ed7c7b 100644 --- a/libs/partners/ollama/langchain_ollama/llms.py +++ b/libs/partners/ollama/langchain_ollama/llms.py @@ -53,7 +53,10 @@ class OllamaLLM(BaseLLM): be present directly within the main response content.""" validate_model_on_init: bool = False - """Whether to validate the model exists in Ollama locally on initialization. + + .. versionadded:: 0.3.4 + """ mirostat: Optional[int] = None """Enable Mirostat sampling for controlling perplexity. diff --git a/uv.lock b/uv.lock index 5343f88fb79..adf48d5e66f 100644 --- a/uv.lock +++ b/uv.lock @@ -2403,7 +2403,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "0.3.69" +version = "0.3.70" source = { editable = "libs/core" } dependencies = [ { name = "jsonpatch" },