diff --git a/docs/docs/integrations/tools/prolog_tool.ipynb b/docs/docs/integrations/tools/prolog_tool.ipynb index 8e0dc9d1e7a..37d38b78ddc 100644 --- a/docs/docs/integrations/tools/prolog_tool.ipynb +++ b/docs/docs/integrations/tools/prolog_tool.ipynb @@ -86,33 +86,14 @@ "execution_count": 3, "id": "68709aa2", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "#!pip install python-dotenv\n", "\n", "from dotenv import find_dotenv, load_dotenv\n", "\n", - "load_dotenv(find_dotenv(), override=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "c171c1ef", - "metadata": {}, - "outputs": [], - "source": [ + "load_dotenv(find_dotenv(), override=True)\n", + "\n", "#!pip install langchain-openai\n", "\n", "from langchain_core.messages import HumanMessage\n", @@ -124,19 +105,37 @@ "id": "9d23b2c5", "metadata": {}, "source": [ - "To use the tool, bind it to the LLM model and query the model:" + "To use the tool, bind it to the LLM model:" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "id": "3ce4479d", "metadata": {}, "outputs": [], "source": [ "llm = ChatOpenAI(model=\"gpt-4o-mini\")\n", - "llm_with_tools = llm.bind_tools([prolog_tool])\n", - "messages = [HumanMessage(\"Who are John's children?\")]\n", + "llm_with_tools = llm.bind_tools([prolog_tool])" + ] + }, + { + "cell_type": "markdown", + "id": "74767c7c", + "metadata": {}, + "source": [ + "and then query the model:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "affb960e", + "metadata": {}, + "outputs": [], + "source": [ + "query = \"Who are John's children?\"\n", + "messages = [HumanMessage(query)]\n", "response = llm_with_tools.invoke(messages)" ] }, @@ -159,7 +158,7 @@ "text/plain": [ "{'name': 'family_query',\n", " 'args': {'men': 'john', 'women': None, 'child': None},\n", - " 'id': 
'call_3VazWUstCGlY8zczi05TaduU',\n", + " 'id': 'call_gH8rWamYXITrkfvRP2s5pkbF',\n", " 'type': 'tool_call'}" ] }, @@ -208,7 +207,7 @@ { "data": { "text/plain": [ - "ToolMessage(content='[{\"Women\": \"bianca\", \"Child\": \"mary\"}, {\"Women\": \"bianca\", \"Child\": \"michael\"}]', name='family_query', tool_call_id='call_3VazWUstCGlY8zczi05TaduU')" + "ToolMessage(content='[{\"Women\": \"bianca\", \"Child\": \"mary\"}, {\"Women\": \"bianca\", \"Child\": \"michael\"}]', name='family_query', tool_call_id='call_gH8rWamYXITrkfvRP2s5pkbF')" ] }, "execution_count": 8, @@ -239,7 +238,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "John has two children: Mary and Michael. Their mother is Bianca.\n" + "John has two children: Mary and Michael, with Bianca as their mother.\n" ] } ], @@ -269,28 +268,15 @@ { "cell_type": "code", "execution_count": 10, - "id": "b7bbfd79", + "id": "8957d420", "metadata": {}, "outputs": [], "source": [ - "from langchain.agents import AgentExecutor, create_tool_calling_agent\n", - "from langchain_core.prompts import ChatPromptTemplate\n", + "#!pip install langgraph\n", "\n", - "llm = ChatOpenAI(model=\"gpt-4o-mini\")\n", + "from langgraph.prebuilt import create_react_agent\n", "\n", - "prompt = ChatPromptTemplate.from_messages(\n", - " [\n", - " (\"system\", \"You are a helpful assistant\"),\n", - " (\"human\", \"{input}\"),\n", - " (\"placeholder\", \"{agent_scratchpad}\"),\n", - " ]\n", - ")\n", - "\n", - "tools = [prolog_tool]\n", - "\n", - "agent = create_tool_calling_agent(llm, tools, prompt)\n", - "\n", - "agent_executor = AgentExecutor(agent=agent, tools=tools)" + "agent_executor = create_react_agent(llm, [prolog_tool])" ] }, { @@ -310,7 +296,7 @@ }, "outputs": [], "source": [ - "answer = agent_executor.invoke({\"input\": \"Who are John's children?\"})" + "messages = agent_executor.invoke({\"messages\": [(\"human\", query)]})" ] }, { @@ -318,7 +304,7 @@ "id": "92e71ffa", "metadata": {}, "source": [ - "Then the agent recieves the 
tool response as part of the `\"agent_scratchpad`\" placeholder and generates the answer:" + "Then the agent receives the tool response and generates the answer:" ] }, { "cell_type": "code", @@ -331,12 +317,14 @@ "name": "stdout", "output_type": "stream", "text": [ + "==================================\u001b[1m Ai Message \u001b[0m==================================\n", + "\n", "John has two children: Mary and Michael, with Bianca as their mother.\n" ] } ], "source": [ - "print(answer[\"output\"])" + "messages[\"messages\"][-1].pretty_print()" ] }, { @@ -346,17 +334,23 @@ "source": [ "## API reference\n", "\n", - "(TODO: update with API reference once built.)\n", - "\n", - "See https://github.com/apisani1/langchain-prolog/tree/main for detail." + "See https://langchain-prolog.readthedocs.io/en/latest/modules.html for details." ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a95afa82", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python 3.9 (test-env)", + "display_name": "Python 3.10 (test-again)", "language": "python", - "name": "test-env" + "name": "test-again" }, "language_info": { "codemirror_mode": { @@ -368,7 +362,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.21" + "version": "3.10.16" } }, "nbformat": 4,