Transition prolog_tool doc to langgraph (#29972)

@ccurme As suggested, I transitioned the prolog_tool documentation to langgraph.

---------

Co-authored-by: Chester Curme <chester.curme@gmail.com>
Antonio Pisani 2025-02-24 17:34:53 -06:00 committed by GitHub
parent 79f5bbfb26
commit 820a4c068c


@@ -86,33 +86,14 @@
    "execution_count": 3,
    "id": "68709aa2",
    "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "True"
-      ]
-     },
-     "execution_count": 3,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
    "source": [
     "#!pip install python-dotenv\n",
     "\n",
     "from dotenv import find_dotenv, load_dotenv\n",
     "\n",
-    "load_dotenv(find_dotenv(), override=True)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "c171c1ef",
-   "metadata": {},
-   "outputs": [],
-   "source": [
+    "load_dotenv(find_dotenv(), override=True)\n",
+    "\n",
     "#!pip install langchain-openai\n",
     "\n",
     "from langchain_core.messages import HumanMessage\n",
@@ -124,19 +105,37 @@
    "id": "9d23b2c5",
    "metadata": {},
    "source": [
-    "To use the tool, bind it to the LLM model and query the model:"
+    "To use the tool, bind it to the LLM model:"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 4,
    "id": "3ce4479d",
    "metadata": {},
    "outputs": [],
    "source": [
     "llm = ChatOpenAI(model=\"gpt-4o-mini\")\n",
-    "llm_with_tools = llm.bind_tools([prolog_tool])\n",
-    "messages = [HumanMessage(\"Who are John's children?\")]\n",
+    "llm_with_tools = llm.bind_tools([prolog_tool])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "74767c7c",
+   "metadata": {},
+   "source": [
+    "and then query the model:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "affb960e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "query = \"Who are John's children?\"\n",
+    "messages = [HumanMessage(query)]\n",
     "response = llm_with_tools.invoke(messages)"
    ]
   },
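
Decoded from the JSON-escaped cell sources above, the new cells amount to the following Python (a sketch only: prolog_tool is built in earlier, unchanged cells of the notebook, and the ChatOpenAI import from langchain_openai is assumed from those cells):

#!pip install langchain-openai

from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI

# prolog_tool is defined earlier in the notebook (not part of this hunk)
llm = ChatOpenAI(model="gpt-4o-mini")
llm_with_tools = llm.bind_tools([prolog_tool])

# Ask the question; the model should respond with a family_query tool call
query = "Who are John's children?"
messages = [HumanMessage(query)]
response = llm_with_tools.invoke(messages)
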
@@ -159,7 +158,7 @@
       "text/plain": [
        "{'name': 'family_query',\n",
        " 'args': {'men': 'john', 'women': None, 'child': None},\n",
-       " 'id': 'call_3VazWUstCGlY8zczi05TaduU',\n",
+       " 'id': 'call_gH8rWamYXITrkfvRP2s5pkbF',\n",
        " 'type': 'tool_call'}"
       ]
      },
@@ -208,7 +207,7 @@
     {
      "data": {
       "text/plain": [
-       "ToolMessage(content='[{\"Women\": \"bianca\", \"Child\": \"mary\"}, {\"Women\": \"bianca\", \"Child\": \"michael\"}]', name='family_query', tool_call_id='call_3VazWUstCGlY8zczi05TaduU')"
+       "ToolMessage(content='[{\"Women\": \"bianca\", \"Child\": \"mary\"}, {\"Women\": \"bianca\", \"Child\": \"michael\"}]', name='family_query', tool_call_id='call_gH8rWamYXITrkfvRP2s5pkbF')"
       ]
      },
      "execution_count": 8,
@@ -239,7 +238,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "John has two children: Mary and Michael. Their mother is Bianca.\n"
+      "John has two children: Mary and Michael, with Bianca as their mother.\n"
      ]
     }
    ],
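
The cells between these hunks are unchanged by this PR; they run the selected tool call through prolog_tool and feed the resulting ToolMessage back to the model. A rough sketch of that round trip, using only the names visible in the diff plus assumed glue code:

# Execute the Prolog query chosen by the model; invoking a tool with a
# tool-call dict returns a ToolMessage (see the output shown above)
tool_msg = prolog_tool.invoke(response.tool_calls[0])

# Append the AI message and the tool result, then ask for the final answer
messages.append(response)
messages.append(tool_msg)
final = llm_with_tools.invoke(messages)
print(final.content)
# John has two children: Mary and Michael, with Bianca as their mother.
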
@@ -269,28 +268,15 @@
   {
    "cell_type": "code",
    "execution_count": 10,
-   "id": "b7bbfd79",
+   "id": "8957d420",
    "metadata": {},
    "outputs": [],
    "source": [
-    "from langchain.agents import AgentExecutor, create_tool_calling_agent\n",
-    "from langchain_core.prompts import ChatPromptTemplate\n",
+    "#!pip install langgraph\n",
     "\n",
-    "llm = ChatOpenAI(model=\"gpt-4o-mini\")\n",
+    "from langgraph.prebuilt import create_react_agent\n",
     "\n",
-    "prompt = ChatPromptTemplate.from_messages(\n",
-    "    [\n",
-    "        (\"system\", \"You are a helpful assistant\"),\n",
-    "        (\"human\", \"{input}\"),\n",
-    "        (\"placeholder\", \"{agent_scratchpad}\"),\n",
-    "    ]\n",
-    ")\n",
-    "\n",
-    "tools = [prolog_tool]\n",
-    "\n",
-    "agent = create_tool_calling_agent(llm, tools, prompt)\n",
-    "\n",
-    "agent_executor = AgentExecutor(agent=agent, tools=tools)"
+    "agent_executor = create_react_agent(llm, [prolog_tool])"
    ]
   },
   {
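
Decoded from the cell source, this change swaps the legacy create_tool_calling_agent/AgentExecutor setup for LangGraph's prebuilt ReAct agent. A minimal sketch, assuming llm and prolog_tool from the earlier cells:

#!pip install langgraph

from langgraph.prebuilt import create_react_agent

# The prebuilt graph handles the prompt, tool calling, and the scratchpad
# loop internally, so no ChatPromptTemplate or AgentExecutor is needed
agent_executor = create_react_agent(llm, [prolog_tool])
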
@@ -310,7 +296,7 @@
    },
    "outputs": [],
    "source": [
-    "answer = agent_executor.invoke({\"input\": \"Who are John's children?\"})"
+    "messages = agent_executor.invoke({\"messages\": [(\"human\", query)]})"
    ]
   },
   {
@@ -318,7 +304,7 @@
    "id": "92e71ffa",
    "metadata": {},
    "source": [
-    "Then the agent recieves the tool response as part of the `\"agent_scratchpad`\" placeholder and generates the answer:"
+    "Then the agent receives the tool response and generates the answer:"
    ]
   },
   {
@@ -331,12 +317,14 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
+      "==================================\u001b[1m Ai Message \u001b[0m==================================\n",
+      "\n",
       "John has two children: Mary and Michael, with Bianca as their mother.\n"
      ]
     }
    ],
    "source": [
-    "print(answer[\"output\"])"
+    "messages[\"messages\"][-1].pretty_print()"
    ]
   },
   {
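
Put together, the new LangGraph invocation in these cells reads as follows (sketch; query is the question defined earlier in the notebook):

# LangGraph agents take and return a dict with a "messages" list
messages = agent_executor.invoke({"messages": [("human", query)]})

# The last message is the final AI answer built from the Prolog results
messages["messages"][-1].pretty_print()
# ================================== Ai Message ==================================
# John has two children: Mary and Michael, with Bianca as their mother.
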
@@ -346,17 +334,23 @@
    "source": [
     "## API reference\n",
     "\n",
-    "(TODO: update with API reference once built.)\n",
-    "\n",
-    "See https://github.com/apisani1/langchain-prolog/tree/main for detail."
+    "See https://langchain-prolog.readthedocs.io/en/latest/modules.html for detail."
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a95afa82",
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3.9 (test-env)",
+   "display_name": "Python 3.10 (test-again)",
    "language": "python",
-   "name": "test-env"
+   "name": "test-again"
   },
   "language_info": {
    "codemirror_mode": {
@@ -368,7 +362,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.21"
+   "version": "3.10.16"
   }
  },
  "nbformat": 4,