community[minor]: add tool calling for DeepInfraChat (#22745)

ChatDeepInfra now supports tool calling for DeepInfra models that offer function calling; a minimal sketch follows.
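
A minimal sketch of the new usage, assuming `DEEPINFRA_API_TOKEN` is set in the environment; the tool name, prompt, and return value below are illustrative only and not part of this PR:

```python
from langchain_community.chat_models import ChatDeepInfra
from langchain_core.tools import tool


# Hypothetical example tool; any @tool-decorated function (or pydantic model) works.
@tool
def get_weather(city: str) -> str:
    """Return the current weather for a city."""
    return f"Sunny in {city}"


llm = ChatDeepInfra(model="meta-llama/Meta-Llama-3-70B-Instruct")
llm_with_tools = llm.bind_tools([get_weather])

# The model emits tool calls, which LangChain parses into the message's .tool_calls.
print(llm_with_tools.invoke("What's the weather in Paris?").tool_calls)
```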

---------

Co-authored-by: Bagatur <baskaryan@gmail.com>
Author: Oguz Vuruskaner
Date: 2024-06-17 12:21:49 -07:00
Committed by: GitHub
Parent: 158701ab3c
Commit: dd25d08c06
3 changed files with 211 additions and 17 deletions

@@ -98,6 +98,78 @@
")\n",
"chat.invoke(messages)"
]
},
{
"cell_type": "markdown",
"id": "466c3cb41ace1410",
"metadata": {},
"source": [
"# Tool Calling\n",
"\n",
"DeepInfra currently supports tool calling only through `invoke` and the async `ainvoke`.\n",
"\n",
"For a complete list of models that support tool calling, please refer to DeepInfra's [tool calling documentation](https://deepinfra.com/docs/advanced/function_calling)."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ddc4f4299763651c",
"metadata": {},
"outputs": [],
"source": [
"import asyncio\n",
"\n",
"from dotenv import find_dotenv, load_dotenv\n",
"from langchain_community.chat_models import ChatDeepInfra\n",
"from langchain_core.messages import HumanMessage\n",
"from langchain_core.pydantic_v1 import BaseModel\n",
"from langchain_core.tools import tool\n",
"\n",
"model_name = \"meta-llama/Meta-Llama-3-70B-Instruct\"\n",
"\n",
"# Load DEEPINFRA_API_TOKEN from a local .env file, if present.\n",
"_ = load_dotenv(find_dotenv())\n",
"\n",
"\n",
"# LangChain tool: a plain function exposed to the model via the @tool decorator.\n",
"@tool\n",
"def foo(something):\n",
"    \"\"\"\n",
"    Called when the user asks for foo.\n",
"    \"\"\"\n",
"    pass\n",
"\n",
"\n",
"# Pydantic model used as a tool: its docstring becomes the tool description.\n",
"class Bar(BaseModel):\n",
"    \"\"\"\n",
"    Called when the user asks for bar.\n",
"    \"\"\"\n",
"\n",
"    pass\n",
"\n",
"\n",
"llm = ChatDeepInfra(model=model_name)\n",
"tools = [foo, Bar]\n",
"llm_with_tools = llm.bind_tools(tools)\n",
"messages = [\n",
" HumanMessage(\"Foo and bar, please.\"),\n",
"]\n",
"\n",
"response = llm_with_tools.invoke(messages)\n",
"print(response.tool_calls)\n",
"# [{'name': 'foo', 'args': {'something': None}, 'id': 'call_Mi4N4wAtW89OlbizFE1aDxDj'}, {'name': 'Bar', 'args': {}, 'id': 'call_daiE0mW454j2O1KVbmET4s2r'}]\n",
"\n",
"\n",
"async def call_ainvoke():\n",
" result = await llm_with_tools.ainvoke(messages)\n",
" print(result.tool_calls)\n",
"\n",
"\n",
"# Async call\n",
"asyncio.run(call_ainvoke())\n",
"# [{'name': 'foo', 'args': {'something': None}, 'id': 'call_ZH7FetmgSot4LHcMU6CEb8tI'}, {'name': 'Bar', 'args': {}, 'id': 'call_2MQhDifAJVoijZEvH8PeFSVB'}]"
]
}
],
"metadata": {