diff --git a/libs/community/langchain_community/chat_models/llamacpp.py b/libs/community/langchain_community/chat_models/llamacpp.py
index 03eb0054bae..1cdc357c1b5 100644
--- a/libs/community/langchain_community/chat_models/llamacpp.py
+++ b/libs/community/langchain_community/chat_models/llamacpp.py
@@ -531,15 +531,15 @@ class ChatLlamaCpp(BaseChatModel):
                 "schema must be specified when method is 'function_calling'. "
                 "Received None."
             )
-        llm = self.bind_tools([schema], tool_choice=True)
+        tool_name = convert_to_openai_tool(schema)["function"]["name"]
+        llm = self.bind_tools([schema], tool_choice=tool_name)
         if is_pydantic_schema:
             output_parser: OutputParserLike = PydanticToolsParser(
                 tools=[cast(Type, schema)], first_tool_only=True
             )
         else:
-            key_name = convert_to_openai_tool(schema)["function"]["name"]
             output_parser = JsonOutputKeyToolsParser(
-                key_name=key_name, first_tool_only=True
+                key_name=tool_name, first_tool_only=True
             )
 
         if include_raw:
diff --git a/libs/partners/anthropic/langchain_anthropic/chat_models.py b/libs/partners/anthropic/langchain_anthropic/chat_models.py
index d0b279f170a..d24fab015d9 100644
--- a/libs/partners/anthropic/langchain_anthropic/chat_models.py
+++ b/libs/partners/anthropic/langchain_anthropic/chat_models.py
@@ -986,7 +986,9 @@ class ChatAnthropic(BaseChatModel):
         #     }
 
         """  # noqa: E501
-        llm = self.bind_tools([schema], tool_choice="any")
+
+        tool_name = convert_to_anthropic_tool(schema)["name"]
+        llm = self.bind_tools([schema], tool_choice=tool_name)
         if isinstance(schema, type) and issubclass(schema, BaseModel):
             output_parser = ToolsOutputParser(
                 first_tool_only=True, pydantic_schemas=[schema]
diff --git a/libs/partners/fireworks/langchain_fireworks/chat_models.py b/libs/partners/fireworks/langchain_fireworks/chat_models.py
index c94d16a2d92..7eb7ae70f9f 100644
--- a/libs/partners/fireworks/langchain_fireworks/chat_models.py
+++ b/libs/partners/fireworks/langchain_fireworks/chat_models.py
@@ -884,15 +884,15 @@ class ChatFireworks(BaseChatModel):
                     "schema must be specified when method is 'function_calling'. "
                     "Received None."
                 )
-            llm = self.bind_tools([schema], tool_choice=True)
+            tool_name = convert_to_openai_tool(schema)["function"]["name"]
+            llm = self.bind_tools([schema], tool_choice=tool_name)
             if is_pydantic_schema:
                 output_parser: OutputParserLike = PydanticToolsParser(
                     tools=[schema], first_tool_only=True
                 )
             else:
-                key_name = convert_to_openai_tool(schema)["function"]["name"]
                 output_parser = JsonOutputKeyToolsParser(
-                    key_name=key_name, first_tool_only=True
+                    key_name=tool_name, first_tool_only=True
                 )
         elif method == "json_mode":
             llm = self.bind(response_format={"type": "json_object"})
diff --git a/libs/partners/groq/langchain_groq/chat_models.py b/libs/partners/groq/langchain_groq/chat_models.py
index d791a4c2075..6f758c93a13 100644
--- a/libs/partners/groq/langchain_groq/chat_models.py
+++ b/libs/partners/groq/langchain_groq/chat_models.py
@@ -1014,15 +1014,15 @@ class ChatGroq(BaseChatModel):
                     "schema must be specified when method is 'function_calling'. "
                     "Received None."
                 )
-            llm = self.bind_tools([schema], tool_choice=True)
+            tool_name = convert_to_openai_tool(schema)["function"]["name"]
+            llm = self.bind_tools([schema], tool_choice=tool_name)
             if is_pydantic_schema:
                 output_parser: OutputParserLike = PydanticToolsParser(
                     tools=[schema], first_tool_only=True
                 )
             else:
-                key_name = convert_to_openai_tool(schema)["function"]["name"]
                 output_parser = JsonOutputKeyToolsParser(
-                    key_name=key_name, first_tool_only=True
+                    key_name=tool_name, first_tool_only=True
                 )
         elif method == "json_mode":
             llm = self.bind(response_format={"type": "json_object"})
diff --git a/libs/partners/mistralai/langchain_mistralai/chat_models.py b/libs/partners/mistralai/langchain_mistralai/chat_models.py
index 5ae84c6d08f..5892aeeeb08 100644
--- a/libs/partners/mistralai/langchain_mistralai/chat_models.py
+++ b/libs/partners/mistralai/langchain_mistralai/chat_models.py
@@ -794,6 +794,8 @@ class ChatMistralAI(BaseChatModel):
                     "schema must be specified when method is 'function_calling'. "
                     "Received None."
                 )
+            # TODO: Update to pass in tool name as tool_choice if/when Mistral supports
+            # specifying a tool.
             llm = self.bind_tools([schema], tool_choice="any")
             if is_pydantic_schema:
                 output_parser: OutputParserLike = PydanticToolsParser(
diff --git a/libs/partners/openai/langchain_openai/chat_models/azure.py b/libs/partners/openai/langchain_openai/chat_models/azure.py
index 0d2632b9bf5..faa8d747c12 100644
--- a/libs/partners/openai/langchain_openai/chat_models/azure.py
+++ b/libs/partners/openai/langchain_openai/chat_models/azure.py
@@ -857,15 +857,15 @@ class AzureChatOpenAI(BaseChatOpenAI):
                     "schema must be specified when method is 'function_calling'. "
                     "Received None."
                 )
-            llm = self.bind_tools([schema], tool_choice=True)
+            tool_name = convert_to_openai_tool(schema)["function"]["name"]
+            llm = self.bind_tools([schema], tool_choice=tool_name)
             if is_pydantic_schema:
                 output_parser: OutputParserLike = PydanticToolsParser(
                     tools=[schema], first_tool_only=True
                 )
             else:
-                key_name = convert_to_openai_tool(schema)["function"]["name"]
                 output_parser = JsonOutputKeyToolsParser(
-                    key_name=key_name, first_tool_only=True
+                    key_name=tool_name, first_tool_only=True
                 )
         elif method == "json_mode":
             llm = self.bind(response_format={"type": "json_object"})
diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index 1a1f9561ed1..c9f1045fbee 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -1140,15 +1140,17 @@ class BaseChatOpenAI(BaseChatModel):
                     "schema must be specified when method is 'function_calling'. "
                     "Received None."
                 )
-            llm = self.bind_tools([schema], tool_choice=True, parallel_tool_calls=False)
+            tool_name = convert_to_openai_tool(schema)["function"]["name"]
+            llm = self.bind_tools(
+                [schema], tool_choice=tool_name, parallel_tool_calls=False
+            )
             if is_pydantic_schema:
                 output_parser: OutputParserLike = PydanticToolsParser(
                     tools=[schema], first_tool_only=True
                 )
             else:
-                key_name = convert_to_openai_tool(schema)["function"]["name"]
                 output_parser = JsonOutputKeyToolsParser(
-                    key_name=key_name, first_tool_only=True
+                    key_name=tool_name, first_tool_only=True
                 )
         elif method == "json_mode":
             llm = self.bind(response_format={"type": "json_object"})
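For context, a minimal sketch of the behavior this diff standardizes on, shown with ChatOpenAI. The model name, the AnswerWithJustification schema, and the prompt are illustrative assumptions, not part of this change; it also assumes langchain-openai with pydantic v2 and an OPENAI_API_KEY in the environment. The point of the change: binding the tool's own name as tool_choice forces the model to call that specific tool, whereas the previous tool_choice=True (or "any") only required that some tool be called.

# Hypothetical usage sketch: force the structured-output tool by name.
from langchain_core.utils.function_calling import convert_to_openai_tool
from langchain_openai import ChatOpenAI
from pydantic import BaseModel


class AnswerWithJustification(BaseModel):
    """An answer to a question, with a justification."""

    answer: str
    justification: str


# Derive the tool name the same way the patched with_structured_output code does.
tool_name = convert_to_openai_tool(AnswerWithJustification)["function"]["name"]

llm = ChatOpenAI(model="gpt-4o-mini")  # example model; any tool-calling model works
# Before this change: tool_choice=True only forced *a* tool call.
# After: the named tool_choice forces a call to this specific tool.
bound = llm.bind_tools(
    [AnswerWithJustification], tool_choice=tool_name, parallel_tool_calls=False
)

ai_msg = bound.invoke("What weighs more, a pound of bricks or a pound of feathers?")
print(ai_msg.tool_calls[0]["name"])  # -> "AnswerWithJustification"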