core[patch], community[patch], openai[patch]: consolidate openai tool and function converters (#16485)

One way to convert anything to an OAI function:
convert_to_openai_function
One way to convert anything to an OAI tool: convert_to_openai_tool
Corresponding bind functions on OAI models: bind_functions, bind_tools
This commit is contained in:
Bagatur
2024-01-25 13:18:46 -08:00
committed by GitHub
parent 148347e858
commit ef42d9d559
25 changed files with 1480 additions and 326 deletions

View File

@@ -5,13 +5,13 @@ from json import JSONDecodeError
from time import sleep
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
from langchain_community.tools.convert_to_openai import format_tool_to_openai_tool
from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.callbacks import CallbackManager
from langchain_core.load import dumpd
from langchain_core.pydantic_v1 import Field
from langchain_core.runnables import RunnableConfig, RunnableSerializable, ensure_config
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_tool
if TYPE_CHECKING:
import openai
@@ -180,16 +180,10 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
OpenAIAssistantRunnable configured to run using the created assistant.
"""
client = client or _get_openai_client()
openai_tools: List = []
for tool in tools:
oai_tool = (
tool if isinstance(tool, dict) else format_tool_to_openai_tool(tool)
)
openai_tools.append(oai_tool)
assistant = client.beta.assistants.create(
name=name,
instructions=instructions,
tools=openai_tools,
tools=[convert_to_openai_tool(tool) for tool in tools],
model=model,
)
return cls(assistant_id=assistant.id, client=client, **kwargs)

View File

@@ -1,7 +1,6 @@
"""Module implements an agent that uses OpenAI's APIs function enabled API."""
from typing import Any, List, Optional, Sequence, Tuple, Type, Union
from langchain_community.tools.convert_to_openai import format_tool_to_openai_function
from langchain_core._api import deprecated
from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.callbacks import BaseCallbackManager, Callbacks
@@ -20,6 +19,7 @@ from langchain_core.prompts.chat import (
from langchain_core.pydantic_v1 import root_validator
from langchain_core.runnables import Runnable, RunnablePassthrough
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_function
from langchain.agents import BaseSingleActionAgent
from langchain.agents.format_scratchpad.openai_functions import (
@@ -71,7 +71,7 @@ class OpenAIFunctionsAgent(BaseSingleActionAgent):
@property
def functions(self) -> List[dict]:
return [dict(format_tool_to_openai_function(t)) for t in self.tools]
return [dict(convert_to_openai_function(t)) for t in self.tools]
def plan(
self,
@@ -303,9 +303,7 @@ def create_openai_functions_agent(
"Prompt must have input variable `agent_scratchpad`, but wasn't found. "
f"Found {prompt.input_variables} instead."
)
llm_with_tools = llm.bind(
functions=[format_tool_to_openai_function(t) for t in tools]
)
llm_with_tools = llm.bind(functions=[convert_to_openai_function(t) for t in tools])
agent = (
RunnablePassthrough.assign(
agent_scratchpad=lambda x: format_to_openai_function_messages(

View File

@@ -1,10 +1,10 @@
from typing import Sequence
from langchain_community.tools.convert_to_openai import format_tool_to_openai_tool
from langchain_core.language_models import BaseLanguageModel
from langchain_core.prompts.chat import ChatPromptTemplate
from langchain_core.runnables import Runnable, RunnablePassthrough
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import convert_to_openai_tool
from langchain.agents.format_scratchpad.openai_tools import (
format_to_openai_tool_messages,
@@ -82,9 +82,7 @@ def create_openai_tools_agent(
if missing_vars:
raise ValueError(f"Prompt missing required variables: {missing_vars}")
llm_with_tools = llm.bind(
tools=[format_tool_to_openai_tool(tool) for tool in tools]
)
llm_with_tools = llm.bind(tools=[convert_to_openai_tool(tool) for tool in tools])
agent = (
RunnablePassthrough.assign(

View File

@@ -1,4 +1,4 @@
from langchain_community.tools.convert_to_openai import format_tool_to_openai_function
from langchain_core.utils.function_calling import format_tool_to_openai_function
# For backwards compatibility
__all__ = ["format_tool_to_openai_function"]

View File

@@ -7,11 +7,11 @@ This module contains various ways to render tools.
from typing import List
# For backwards compatibility
from langchain_community.tools.convert_to_openai import (
from langchain_core.tools import BaseTool
from langchain_core.utils.function_calling import (
format_tool_to_openai_function,
format_tool_to_openai_tool,
)
from langchain_core.tools import BaseTool
__all__ = [
"render_text_description",

View File

@@ -15,13 +15,10 @@ def test_convert_pydantic_to_openai_function() -> None:
"name": "Data",
"description": "The data to return.",
"parameters": {
"title": "Data",
"description": "The data to return.",
"type": "object",
"properties": {
"key": {"title": "Key", "description": "API key", "type": "string"},
"key": {"description": "API key", "type": "string"},
"days": {
"title": "Days",
"description": "Number of days to forecast",
"default": 0,
"type": "integer",
@@ -50,22 +47,17 @@ def test_convert_pydantic_to_openai_function_nested() -> None:
"name": "Model",
"description": "The model to return.",
"parameters": {
"title": "Model",
"description": "The model to return.",
"type": "object",
"properties": {
"data": {
"title": "Data",
"description": "The data to return.",
"type": "object",
"properties": {
"key": {
"title": "Key",
"description": "API key",
"type": "string",
},
"days": {
"title": "Days",
"description": "Number of days to forecast",
"default": 0,
"type": "integer",