langchain[patch]: fix ollama in init_chat_model (#24832)

This commit is contained in:
Bagatur 2024-07-30 11:38:53 -07:00 committed by GitHub
parent d8f3ea82db
commit 37b060112a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -102,6 +102,13 @@ def init_chat_model(
.. versionchanged:: 0.2.8
Support for ``configurable_fields`` and ``config_prefix`` added.
.. versionchanged:: 0.2.12
    Support for Ollama via the langchain-ollama package added. Previously the
    langchain-community version of Ollama (now deprecated) was installed by default.
Args:
model: The name of the model, e.g. "gpt-4o", "claude-3-opus-20240229".
model_provider: The model provider. Supported model_provider values and the
@@ -118,7 +125,7 @@ def init_chat_model(
- mistralai (langchain-mistralai)
- huggingface (langchain-huggingface)
- groq (langchain-groq)
- ollama (langchain-ollama)
- ollama (langchain-ollama) [support added in langchain==0.2.12]
Will attempt to infer model_provider from model if not specified. The
following providers will be inferred based on these model prefixes:
@@ -340,8 +347,16 @@ def _init_chat_model_helper(
_check_pkg("langchain_ollama")
from langchain_ollama import ChatOllama
except ImportError:
pass
# For backwards compatibility
try:
_check_pkg("langchain_community")
from langchain_community import ChatOllama
from langchain_community.chat_models import ChatOllama
except ImportError:
# If neither langchain-ollama nor langchain-community is available, raise
# an error pointing at langchain-ollama (the preferred package)
_check_pkg("langchain_ollama")
return ChatOllama(model=model, **kwargs)
elif model_provider == "together":