Compare commits


15 Commits

Author         SHA1        Message                            Date
Chester Curme  f44ab8b4a0  update bind_tools in community     2024-07-11 16:11:01 -04:00
Chester Curme  10fe5fbb18  fix                                2024-07-11 15:53:27 -04:00
Chester Curme  d4405aba79  fix ollama functions               2024-07-11 15:52:32 -04:00
Chester Curme  345c597d5c  update BaseChatModel.bind_tools    2024-07-11 15:00:44 -04:00
Chester Curme  8032a1e900  update ibm                         2024-07-11 14:52:52 -04:00
Chester Curme  d262e7b222  update huggingface                 2024-07-11 14:49:16 -04:00
Chester Curme  971b80a8a1  update mistral                     2024-07-11 14:41:59 -04:00
Chester Curme  c1c449c500  update groq                        2024-07-11 14:39:54 -04:00
Chester Curme  643f72da21  update fireworks                   2024-07-11 14:37:14 -04:00
Chester Curme  c2b38a5fe0  update openai docstring            2024-07-11 14:35:42 -04:00
Chester Curme  5f1ba501f3  format                             2024-07-11 14:27:48 -04:00
Chester Curme  7814d32206  update anthropic                   2024-07-11 14:26:31 -04:00
Chester Curme  62f1a38c78  update openai + azure              2024-07-11 14:25:01 -04:00
Chester Curme  339074f1e7  update standard-tests              2024-07-11 14:24:21 -04:00
Chester Curme  02e2db549b  update core                        2024-07-11 14:23:10 -04:00
20 changed files with 137 additions and 65 deletions
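
Taken together, these commits widen bind_tools (and the OpenAI/Anthropic conversion helpers) to accept Runnable objects alongside dicts, pydantic models, callables, and BaseTools; a Runnable is converted to a tool schema internally via Runnable.as_tool(). A minimal usage sketch of what the branch enables, mirroring the new standard test further down — the provider class and model name below are illustrative placeholders, not part of this changeset:

from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI  # illustrative provider; any integration updated here would do

# A runnable chain whose single input variable ("answer_style") becomes the
# argument schema of the generated tool.
prompt = ChatPromptTemplate.from_messages(
    [("human", "Hello. Please respond in the style of {answer_style}.")]
)
chain = prompt | ChatOpenAI(model="gpt-4o-mini") | StrOutputParser()

# After this change the chain can be passed to bind_tools directly; it is
# converted internally with Runnable.as_tool().
model_with_tools = ChatOpenAI(model="gpt-4o-mini").bind_tools([chain])
result = model_with_tools.invoke("Using the tool, ask a Pirate how it would say hello.")
print(result.tool_calls)  # expected to include args with an "answer_style" value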

View File

@@ -614,7 +614,9 @@ class QianfanChatEndpoint(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model.
@@ -623,9 +625,9 @@ class QianfanChatEndpoint(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             **kwargs: Any additional parameters to pass to the
                 :class:`~langchain.runnable.Runnable` constructor.
         """

View File

@@ -471,7 +471,9 @@ class ChatDeepInfra(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model.
@@ -480,9 +482,9 @@ class ChatDeepInfra(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             **kwargs: Any additional parameters to pass to the
                 :class:`~langchain.runnable.Runnable` constructor.
         """

View File

@@ -418,7 +418,9 @@ class ChatEdenAI(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         *,
         tool_choice: Optional[
             Union[dict, str, Literal["auto", "none", "required", "any"], bool]

View File

@@ -418,7 +418,9 @@ class ChatLiteLLM(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model.
@@ -427,9 +429,9 @@ class ChatLiteLLM(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             tool_choice: Which tool to require the model to call.
                 Must be the name of the single provided function or
                 "auto" to automatically determine which function to call

View File

@@ -334,7 +334,9 @@ class ChatLlamaCpp(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         *,
         tool_choice: Optional[Union[Dict[str, Dict], bool, str]] = None,
         **kwargs: Any,

View File

@@ -775,16 +775,18 @@ class ChatTongyi(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model.
 
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             **kwargs: Any additional parameters to pass to the
                 :class:`~langchain.runnable.Runnable` constructor.
         """

View File

@@ -1044,7 +1044,9 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         raise NotImplementedError()

View File

@@ -27,6 +27,7 @@ from langchain_core.messages import (
     ToolMessage,
 )
 from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.runnables import Runnable
 from langchain_core.utils.json_schema import dereference_refs
 
 if TYPE_CHECKING:
@@ -189,15 +190,16 @@ def format_tool_to_openai_tool(tool: BaseTool) -> ToolDescription:
 def convert_to_openai_function(
-    function: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
+    function: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable],
+    **kwargs: Any,
 ) -> Dict[str, Any]:
     """Convert a raw function/class to an OpenAI function.
 
     Args:
-        function: Either a dictionary, a pydantic.BaseModel class, or a Python function.
-            If a dictionary is passed in, it is assumed to already be a valid OpenAI
-            function or a JSON schema with top-level 'title' and 'description' keys
-            specified.
+        function: Either a dictionary, a pydantic.BaseModel class, a Python function,
+            or a Runnable. If a dictionary is passed in, it is assumed to already be
+            a valid OpenAI function or a JSON schema with top-level 'title' and
+            'description' keys specified.
 
     Returns:
         A dict version of the passed in function which is compatible with the
@@ -224,6 +226,13 @@ def convert_to_openai_function(
         return cast(Dict, convert_pydantic_to_openai_function(function))
     elif isinstance(function, BaseTool):
         return cast(Dict, format_tool_to_openai_function(function))
+    elif isinstance(function, Runnable):
+        as_tool_kwargs = {
+            k: v for k, v in kwargs.items() if k in ("name", "description", "arg_types")
+        }
+        return cast(
+            Dict, format_tool_to_openai_function(function.as_tool(**as_tool_kwargs))
+        )
     elif callable(function):
         return cast(Dict, convert_python_function_to_openai_function(function))
     else:
@@ -236,15 +245,15 @@ def convert_to_openai_function(
 def convert_to_openai_tool(
-    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
+    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable],
 ) -> Dict[str, Any]:
     """Convert a raw function/class to an OpenAI tool.
 
     Args:
-        tool: Either a dictionary, a pydantic.BaseModel class, Python function, or
-            BaseTool. If a dictionary is passed in, it is assumed to already be a valid
-            OpenAI tool, OpenAI function, or a JSON schema with top-level 'title' and
-            'description' keys specified.
+        tool: Either a dictionary, a pydantic.BaseModel class, Python function,
+            BaseTool, or Runnable. If a dictionary is passed in, it is assumed
+            to already be a valid OpenAI tool, OpenAI function, or a JSON schema
+            with top-level 'title' and 'description' keys specified.
 
     Returns:
         A dict version of the passed in tool which is compatible with the
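
Given the new isinstance(function, Runnable) branch above, a Runnable is turned into a tool with Runnable.as_tool() (forwarding any name, description, or arg_types keyword arguments) and then formatted like any other BaseTool. A rough sketch of the two equivalent call styles, assuming the branch lands as shown; the RunnableLambda and its argument names are made up for illustration:

from langchain_core.runnables import RunnableLambda
from langchain_core.utils.function_calling import convert_to_openai_function

# A toy runnable over dict input; arg_types gives the generated schema a typed field.
shout = RunnableLambda(lambda x: x["text"].upper())

# Explicit conversion first...
fn_a = convert_to_openai_function(
    shout.as_tool(name="shout", description="Upper-case some text.", arg_types={"text": str})
)
# ...or let convert_to_openai_function call as_tool itself (the new branch,
# passing name/description/arg_types through **kwargs).
fn_b = convert_to_openai_function(
    shout, name="shout", description="Upper-case some text.", arg_types={"text": str}
)
assert fn_a == fn_b  # both are OpenAI function dicts with name, description, parameters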

View File

@@ -326,7 +326,9 @@ class FakeStructuredOutputModel(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         return self.bind(tools=tools)
@@ -356,7 +358,9 @@ class FakeModel(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         return self.bind(tools=tools)

View File

@@ -188,7 +188,6 @@ def test_convert_to_openai_function(
     assert actual == expected
 
     # Test runnables
-    actual = convert_to_openai_function(runnable.as_tool(description="dummy function"))
     parameters = {
         "type": "object",
         "properties": {
@@ -202,7 +201,11 @@
     }
     runnable_expected = expected.copy()
     runnable_expected["parameters"] = parameters
-    assert actual == runnable_expected
+    for actual in [
+        convert_to_openai_function(runnable.as_tool(description="dummy function")),
+        convert_to_openai_function(runnable, description="dummy function"),
+    ]:
+        assert actual == runnable_expected
 
 
 def test_convert_to_openai_function_nested() -> None:

View File

@@ -143,9 +143,16 @@ class OllamaFunctions(ChatOllama):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
+        if any(isinstance(tool, Runnable) for tool in tools):
+            raise NotImplementedError(
+                "OllamaFunctions does not support binding Runnables as tools. "
+                "Use Runnable.as_tool() to convert the runnable to a tool."
+            )
         return self.bind(functions=tools, **kwargs)
 
     def with_structured_output(
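
Unlike the other integrations in this changeset, OllamaFunctions rejects raw Runnables and directs the caller to convert them explicitly. A sketch of the workaround named in the error message; the model name and the toy runnable are illustrative, not taken from this diff:

from langchain_core.runnables import RunnableLambda
from langchain_experimental.llms.ollama_functions import OllamaFunctions

llm = OllamaFunctions(model="llama3", format="json")  # illustrative model name

greet = RunnableLambda(lambda x: f"Ahoy, {x['name']}!")

# llm.bind_tools([greet]) now raises NotImplementedError, so convert the runnable
# to a tool first, as the error message suggests.
greet_tool = greet.as_tool(
    name="greet",
    description="Greet someone by name, pirate style.",
    arg_types={"name": str},
)
llm_with_tools = llm.bind_tools([greet_tool])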

View File

@@ -774,7 +774,9 @@ class ChatAnthropic(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         *,
         tool_choice: Optional[
             Union[Dict[str, str], Literal["any", "auto"], str]
@@ -785,9 +787,9 @@ class ChatAnthropic(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             tool_choice: Which tool to require the model to call.
                 Options are:
                     name of the tool (str): calls corresponding tool;
@@ -1024,7 +1026,7 @@ class AnthropicTool(TypedDict):
 def convert_to_anthropic_tool(
-    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool],
+    tool: Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable],
 ) -> AnthropicTool:
     """Convert a tool-like object to an Anthropic tool definition."""
     # already in Anthropic tool format

View File

@@ -665,7 +665,9 @@ class ChatFireworks(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         *,
         tool_choice: Optional[
             Union[dict, str, Literal["auto", "any", "none"], bool]
@@ -678,9 +680,9 @@ class ChatFireworks(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             tool_choice: Which tool to require the model to call.
                 Must be the name of the single provided function,
                 "auto" to automatically determine which function to call

View File

@@ -778,7 +778,9 @@ class ChatGroq(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         *,
         tool_choice: Optional[
             Union[dict, str, Literal["auto", "any", "none"], bool]
@@ -789,9 +791,9 @@ class ChatGroq(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             tool_choice: Which tool to require the model to call.
                 Must be the name of the single provided function,
                 "auto" to automatically determine which function to call

View File

@@ -469,7 +469,9 @@ class ChatHuggingFace(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         *,
         tool_choice: Optional[Union[dict, str, Literal["auto", "none"], bool]] = None,
         **kwargs: Any,
@@ -480,9 +482,9 @@ class ChatHuggingFace(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             tool_choice: Which tool to require the model to call.
                 Must be the name of the single provided function or
                 "auto" to automatically determine which function to call

View File

@@ -682,16 +682,18 @@ Remember, even when answering to the user, you must still use this only JSON for
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model.
 
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             **kwargs: Any additional parameters to pass to the
                 :class:`~langchain.runnable.Runnable` constructor.
         """

View File

@@ -602,7 +602,9 @@ class ChatMistralAI(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model.
@@ -611,9 +613,9 @@ class ChatMistralAI(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             tool_choice: Which tool to require the model to call.
                 Must be the name of the single provided function or
                 "auto" to automatically determine which function to call

View File

@@ -636,7 +636,9 @@ class AzureChatOpenAI(BaseChatOpenAI):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         *,
         tool_choice: Optional[
             Union[dict, str, Literal["auto", "none", "required", "any"], bool]

View File

@@ -870,7 +870,9 @@ class BaseChatOpenAI(BaseChatModel):
     def bind_tools(
         self,
-        tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        tools: Sequence[
+            Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool, Runnable]
+        ],
         *,
         tool_choice: Optional[
             Union[dict, str, Literal["auto", "none", "required", "any"], bool]
@@ -883,9 +885,9 @@ class BaseChatOpenAI(BaseChatModel):
         Args:
             tools: A list of tool definitions to bind to this chat model.
-                Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
-                models, callables, and BaseTools will be automatically converted to
-                their schema dictionary representation.
+                Can be a dictionary, pydantic model, callable, BaseTool, or Runnable.
+                Pydantic models, callables, BaseTools, and Runnables will be
+                automatically converted to their schema dictionary representation.
             tool_choice: Which tool to require the model to call.
                 Options are:
                     name of the tool (str): calls corresponding tool;

View File

@@ -4,7 +4,7 @@ from typing import List, Optional
 import httpx
 import pytest
-from langchain_core.language_models import BaseChatModel
+from langchain_core.language_models import BaseChatModel, GenericFakeChatModel
 from langchain_core.messages import (
     AIMessage,
     AIMessageChunk,
@@ -14,6 +14,8 @@ from langchain_core.messages import (
     SystemMessage,
     ToolMessage,
 )
+from langchain_core.output_parsers import StrOutputParser
+from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.tools import tool
@@ -171,6 +173,23 @@ class ChatModelIntegrationTests(ChatModelTests):
         assert isinstance(full, AIMessage)
         _validate_tool_call_message_no_args(full)
 
+    def test_bind_runnables_as_tools(self, model: BaseChatModel) -> None:
+        if not self.has_tool_calling:
+            pytest.skip("Test requires tool calling.")
+        prompt = ChatPromptTemplate.from_messages(
+            [("human", "Hello. Please respond in the style of {answer_style}.")]
+        )
+        llm = GenericFakeChatModel(messages=iter(["hello matey"]))
+        chain = prompt | llm | StrOutputParser()
+        model_with_tools = model.bind_tools([chain])
+        query = "Using the tool, ask a Pirate how it would say hello."
+        result = model_with_tools.invoke(query)
+        assert isinstance(result, AIMessage)
+        assert result.tool_calls
+        tool_call = result.tool_calls[0]
+        assert tool_call["args"].get("answer_style")
+
     def test_structured_output(self, model: BaseChatModel) -> None:
         if not self.has_tool_calling:
             pytest.skip("Test requires tool calling.")
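
The new test_bind_runnables_as_tools case gives every provider package a shared check that a bound Runnable's input variable (answer_style) surfaces as the tool's argument schema. As a rough follow-on sketch, the bound Runnable is itself the tool implementation, so the emitted arguments can be fed straight back into it; "model" below is a placeholder for the provider chat model under test, and the rest mirrors the test:

from langchain_core.language_models import GenericFakeChatModel
from langchain_core.messages import ToolMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

# Same chain the standard test binds as a tool.
prompt = ChatPromptTemplate.from_messages(
    [("human", "Hello. Please respond in the style of {answer_style}.")]
)
chain = prompt | GenericFakeChatModel(messages=iter(["hello matey"])) | StrOutputParser()

model_with_tools = model.bind_tools([chain])  # "model" = provider chat model under test
result = model_with_tools.invoke("Using the tool, ask a Pirate how it would say hello.")

# Execute the tool call by invoking the chain with the generated arguments and
# hand the output back to the model as a ToolMessage.
tool_call = result.tool_calls[0]
tool_output = chain.invoke(tool_call["args"])  # e.g. {"answer_style": "pirate"}
message = ToolMessage(content=tool_output, tool_call_id=tool_call["id"])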