diff --git a/libs/core/langchain_core/language_models/fake_chat_models.py b/libs/core/langchain_core/language_models/fake_chat_models.py
index 184a4fcb154..721176af33d 100644
--- a/libs/core/langchain_core/language_models/fake_chat_models.py
+++ b/libs/core/langchain_core/language_models/fake_chat_models.py
@@ -15,7 +15,9 @@ from langchain_core.callbacks import (
 from langchain_core.language_models.chat_models import BaseChatModel, SimpleChatModel
 from langchain_core.messages import AIMessage, AIMessageChunk, BaseMessage
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.runnables import RunnableConfig
+from langchain_core.runnables import Runnable, RunnableConfig, RunnableLambda
+from langchain_core.tools import BaseTool
+from collections.abc import Callable, Sequence
 
 
 class FakeMessagesListChatModel(BaseChatModel):
@@ -206,6 +208,37 @@ class FakeChatModel(SimpleChatModel):
     def _identifying_params(self) -> dict[str, Any]:
         return {"key": "fake"}
 
+    @override
+    def bind_tools(
+        self,
+        tools: Sequence[Union[dict[str, Any], type, Callable, BaseTool]],
+        *,
+        tool_choice: Optional[str] = None,
+        choice_input: Optional[Union[dict[str, Any], tuple]] = None,
+        **kwargs: Any,
+    ) -> Runnable:
+        def _call_tool_chain(_input: dict) -> dict:
+            if tool_choice is None:
+                return {}
+
+            if choice_input is None:
+                raise ValueError(
+                    "choice_input must be provided if tool_choice is specified."
+                )
+
+            tool = next(t for t in tools if getattr(t, "name", None) == tool_choice)
+
+            params = choice_input
+
+            if hasattr(tool, "run"):
+                return tool.run(params)
+            else:
+                raise ValueError(
+                    f"Tool {tool} does not have a 'run' method."
+                )
+
+        return RunnableLambda(func=_call_tool_chain)
+
 
 class GenericFakeChatModel(BaseChatModel):
     """Generic fake chat model that can be used to test the chat model interface.
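
A minimal usage sketch (not part of the diff) showing how the new `FakeChatModel.bind_tools` could be exercised, assuming the patch above is applied. The `add` tool is a hypothetical example defined locally for illustration, not something shipped with `langchain_core`.

```python
from langchain_core.language_models.fake_chat_models import FakeChatModel
from langchain_core.tools import tool


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


model = FakeChatModel()

# Without tool_choice, the bound runnable is a no-op that returns an empty dict.
assert model.bind_tools([add]).invoke({}) == {}

# With tool_choice and choice_input, the runnable looks up the named tool and
# calls its run() method with the bound arguments, ignoring the runtime input.
chain = model.bind_tools([add], tool_choice="add", choice_input={"a": 1, "b": 2})
print(chain.invoke({}))  # expected: 3
```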