fix: Modify the order in which init_chat_model imports the ollama package (#24977)

commit 54e9ea433a
parent fe1820cdaf
Author: gbaian10
Date: 2024-08-02 23:32:56 +08:00 (committed by GitHub)
Signature: no known key found for this signature in database (GPG Key ID: B5690EEEBB952194)


@@ -347,16 +347,14 @@ def _init_chat_model_helper(
             _check_pkg("langchain_ollama")
             from langchain_ollama import ChatOllama
         except ImportError:
-            pass
-
-        # For backwards compatibility
-        try:
-            _check_pkg("langchain_community")
-            from langchain_community.chat_models import ChatOllama
-        except ImportError:
-            # If both langchain-ollama and langchain-community aren't available, raise
-            # an error related to langchain-ollama
-            _check_pkg("langchain_ollama")
+            # For backwards compatibility
+            try:
+                _check_pkg("langchain_community")
+                from langchain_community.chat_models import ChatOllama
+            except ImportError:
+                # If both langchain-ollama and langchain-community aren't available,
+                # raise an error related to langchain-ollama
+                _check_pkg("langchain_ollama")

         return ChatOllama(model=model, **kwargs)
     elif model_provider == "together":
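
What the reordering changes in practice: before this commit the langchain_community fallback ran unconditionally after the langchain_ollama import, so an installed langchain-community package could shadow langchain-ollama even when the newer package was present. After this commit the fallback is nested inside the except block, so it only runs when langchain_ollama itself fails to import. The following is a minimal, self-contained sketch of the resulting resolution order; _check_pkg is langchain's internal helper, so the stub below is only an illustrative stand-in, not the library's actual implementation.

# Illustrative sketch (not langchain's actual code) of the import-resolution
# order introduced by this commit.
import importlib.util


def _check_pkg(pkg: str) -> None:
    # Stand-in for langchain's internal helper, which raises ImportError
    # with an installation hint when the package is missing.
    if importlib.util.find_spec(pkg) is None:
        raise ImportError(f"Unable to import {pkg}. Please install it.")


def resolve_chat_ollama():
    try:
        _check_pkg("langchain_ollama")
        from langchain_ollama import ChatOllama  # preferred implementation
    except ImportError:
        # Reached only when langchain-ollama is not importable.
        try:
            _check_pkg("langchain_community")
            from langchain_community.chat_models import ChatOllama
        except ImportError:
            # Neither package is installed: re-run the langchain-ollama check
            # so the resulting error points at the preferred package.
            _check_pkg("langchain_ollama")
    return ChatOllama

With either package installed, a call such as init_chat_model("gemma2", model_provider="ollama") (the model name here is only an example) therefore resolves to langchain_ollama.ChatOllama whenever that package is available, and only falls back to the older langchain_community implementation for backwards compatibility.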