Mirror of https://github.com/hwchase17/langchain.git, synced 2025-09-02 19:47:13 +00:00
Have ChatLlamaCpp handle "auto" and "any" for tool_choice
@@ -7,6 +7,7 @@ from typing import (
     Dict,
     Iterator,
     List,
+    Literal,
     Mapping,
     Optional,
     Sequence,
@@ -342,15 +343,10 @@ class ChatLlamaCpp(BaseChatModel):
         self,
         tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
         *,
-        tool_choice: Optional[Union[dict, bool, str]] = None,
+        tool_choice: Optional[Union[dict, bool, str, Literal["auto", "any"]]] = None,
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
-        """Bind tool-like objects to this chat model
-
-        tool_choice: does not currently support "any", "auto" choices like OpenAI
-            tool-calling API. should be a dict of the form to force this tool
-            {"type": "function", "function": {"name": <<tool_name>>}}.
-        """
+        """Bind tool-like objects to this chat model """
         formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
         tool_names = [ft["function"]["name"] for ft in formatted_tools]
         if tool_choice:
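For context, a minimal usage sketch of the updated signature, assuming langchain-community and llama-cpp-python are installed; the tool definition and the model path below are illustrative placeholders, not part of the commit:

from langchain_community.chat_models import ChatLlamaCpp
from langchain_core.tools import tool


@tool
def get_weather(city: str) -> str:
    """Return a short weather description for a city."""
    return f"Sunny in {city}"


# Hypothetical local GGUF model path; the model is loaded at construction time.
llm = ChatLlamaCpp(model_path="/path/to/model.gguf")

# With this change, all of the following forms of tool_choice are intended to work:
llm_auto = llm.bind_tools([get_weather], tool_choice="auto")  # let the model decide
llm_any = llm.bind_tools([get_weather], tool_choice="any")    # force the single bound tool
llm_forced = llm.bind_tools(
    [get_weather],
    tool_choice={"type": "function", "function": {"name": "get_weather"}},
)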
@@ -363,6 +359,16 @@ class ChatLlamaCpp(BaseChatModel):
                     f"provided tools were {tool_names}."
                 )
             elif isinstance(tool_choice, str):
+                if tool_choice == 'any':
+                    if len(formatted_tools) == 1:
+                        tool_choice = formatted_tools[0]
+                    else:
+                        raise ValueError(
+                            "tool_choice `'any'` only supported if one tool is provided."
+                        )
+                elif tool_choice == 'auto':
+                    tool_choice = None
+                else:
                 chosen = [
                     f for f in formatted_tools if f["function"]["name"] == tool_choice
                 ]
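Read on its own, the added branch maps the string values onto what the backend already understands: "any" is rewritten into forcing the single bound tool (and rejected when several tools are bound), "auto" clears tool_choice so the model is free to choose, and any other string falls through to the existing tool-name lookup. A standalone sketch of that mapping, with a hypothetical helper name used purely for illustration:

from typing import Any, Dict, List, Optional, Union


def resolve_string_tool_choice(
    tool_choice: str, formatted_tools: List[Dict[str, Any]]
) -> Optional[Union[Dict[str, Any], str]]:
    # Mirrors the branch added in the diff above; not part of the commit itself.
    if tool_choice == "any":
        # "any" only makes sense here when exactly one tool is bound:
        # it becomes a forced call of that single tool.
        if len(formatted_tools) == 1:
            return formatted_tools[0]
        raise ValueError("tool_choice `'any'` only supported if one tool is provided.")
    if tool_choice == "auto":
        # "auto" means no forcing at all, so tool_choice is dropped.
        return None
    # Any other string is treated as a tool name and resolved by the
    # pre-existing lookup against formatted_tools further down in bind_tools.
    return tool_choice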
@@ -386,7 +392,6 @@ class ChatLlamaCpp(BaseChatModel):
                 )
 
             kwargs["tool_choice"] = tool_choice
-        formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
         return super().bind(tools=formatted_tools, **kwargs)
 
     def with_structured_output(
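The "any" path also introduces an explicit failure mode: binding more than one tool with tool_choice="any" is rejected with a ValueError at bind time. A self-contained sketch of that error path, again with illustrative tool definitions and a placeholder model path rather than anything from the commit:

from langchain_community.chat_models import ChatLlamaCpp
from langchain_core.tools import tool


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


@tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers."""
    return a * b


llm = ChatLlamaCpp(model_path="/path/to/model.gguf")  # placeholder path

try:
    llm.bind_tools([add, multiply], tool_choice="any")
except ValueError as err:
    # Expected per the diff: "tool_choice `'any'` only supported if one tool is provided."
    print(err)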