From 994c5465e08abfcc67293f9bbccddf17f4daaed7 Mon Sep 17 00:00:00 2001
From: Jawahar S
Date: Wed, 12 Feb 2025 05:04:29 +0530
Subject: [PATCH] feat: add support for IBM WatsonX AI chat models (#29688)

**Description:** Updated init_chat_model to support Granite models deployed
on IBM WatsonX

**Dependencies:** [langchain-ibm](https://github.com/langchain-ai/langchain-ibm)

Tagging @baskaryan @efriis for review when you get a chance.
---
 libs/langchain/langchain/chat_models/base.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/libs/langchain/langchain/chat_models/base.py b/libs/langchain/langchain/chat_models/base.py
index 8799f627e0b..1a2ead99043 100644
--- a/libs/langchain/langchain/chat_models/base.py
+++ b/libs/langchain/langchain/chat_models/base.py
@@ -118,6 +118,7 @@ def init_chat_model(
         - 'ollama' -> langchain-ollama
         - 'google_anthropic_vertex' -> langchain-google-vertexai
         - 'deepseek' -> langchain-deepseek
+        - 'ibm' -> langchain-ibm
         - 'nvidia' -> langchain-nvidia-ai-endpoints
 
     Will attempt to infer model_provider from model if not specified. The
@@ -428,6 +429,11 @@ def _init_chat_model_helper(
         from langchain_nvidia_ai_endpoints import ChatNVIDIA
 
         return ChatNVIDIA(model=model, **kwargs)
+    elif model_provider == "ibm":
+        _check_pkg("langchain_ibm")
+        from langchain_ibm import ChatWatsonx
+
+        return ChatWatsonx(model_id=model, **kwargs)
     else:
         supported = ", ".join(_SUPPORTED_PROVIDERS)
         raise ValueError(
@@ -453,6 +459,7 @@ _SUPPORTED_PROVIDERS = {
     "bedrock_converse",
     "google_anthropic_vertex",
     "deepseek",
+    "ibm",
 }