{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# How to pass callbacks in at runtime\n",
    "\n",
    ":::info Prerequisites\n",
    "\n",
    "This guide assumes familiarity with the following concepts:\n",
    "\n",
    "- [Callbacks](/docs/concepts/#callbacks)\n",
    "- [Custom callback handlers](/docs/how_to/custom_callbacks)\n",
    "\n",
    ":::\n",
    "\n",
"In many cases, it is advantageous to pass in handlers instead when running the object. When we pass through [`CallbackHandlers`](https://api.python.langchain.com/en/latest/callbacks/langchain_core.callbacks.base.BaseCallbackHandler.html#langchain-core-callbacks-base-basecallbackhandler) using the `callbacks` keyword arg when executing an run, those callbacks will be issued by all nested objects involved in the execution. For example, when a handler is passed through to an Agent, it will be used for all callbacks related to the agent and all the objects involved in the agent's execution, in this case, the Tools and LLM.\n",
|
|
"\n",
|
|
"This prevents us from having to manually attach the handlers to each individual nested object. Here's an example:"
|
|
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# | output: false\n",
    "# | echo: false\n",
    "\n",
    "%pip install -qU langchain langchain_anthropic\n",
    "\n",
    "import getpass\n",
    "import os\n",
    "\n",
    "os.environ[\"ANTHROPIC_API_KEY\"] = getpass.getpass()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Chain RunnableSequence started\n",
      "Chain ChatPromptTemplate started\n",
      "Chain ended, outputs: messages=[HumanMessage(content='What is 1 + 2?')]\n",
      "Chat model started\n",
      "Chat model ended, response: generations=[[ChatGeneration(text='1 + 2 = 3', message=AIMessage(content='1 + 2 = 3', response_metadata={'id': 'msg_01D8Tt5FdtBk5gLTfBPm2tac', 'model': 'claude-3-sonnet-20240229', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 16, 'output_tokens': 13}}, id='run-bb0dddd8-85f3-4e6b-8553-eaa79f859ef8-0'))]] llm_output={'id': 'msg_01D8Tt5FdtBk5gLTfBPm2tac', 'model': 'claude-3-sonnet-20240229', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 16, 'output_tokens': 13}} run=None\n",
      "Chain ended, outputs: content='1 + 2 = 3' response_metadata={'id': 'msg_01D8Tt5FdtBk5gLTfBPm2tac', 'model': 'claude-3-sonnet-20240229', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 16, 'output_tokens': 13}} id='run-bb0dddd8-85f3-4e6b-8553-eaa79f859ef8-0'\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "AIMessage(content='1 + 2 = 3', response_metadata={'id': 'msg_01D8Tt5FdtBk5gLTfBPm2tac', 'model': 'claude-3-sonnet-20240229', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 16, 'output_tokens': 13}}, id='run-bb0dddd8-85f3-4e6b-8553-eaa79f859ef8-0')"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
"from typing import Any, Dict, List\n",
|
|
"\n",
|
|
"from langchain_anthropic import ChatAnthropic\n",
|
|
"from langchain_core.callbacks import BaseCallbackHandler\n",
|
|
"from langchain_core.messages import BaseMessage\n",
|
|
"from langchain_core.outputs import LLMResult\n",
|
|
"from langchain_core.prompts import ChatPromptTemplate\n",
|
|
"\n",
|
|
"\n",
|
|
"class LoggingHandler(BaseCallbackHandler):\n",
|
|
" def on_chat_model_start(\n",
|
|
" self, serialized: Dict[str, Any], messages: List[List[BaseMessage]], **kwargs\n",
|
|
" ) -> None:\n",
|
|
" print(\"Chat model started\")\n",
|
|
"\n",
|
|
" def on_llm_end(self, response: LLMResult, **kwargs) -> None:\n",
|
|
" print(f\"Chat model ended, response: {response}\")\n",
|
|
"\n",
|
|
" def on_chain_start(\n",
|
|
" self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs\n",
|
|
" ) -> None:\n",
|
|
" print(f\"Chain {serialized.get('name')} started\")\n",
|
|
"\n",
|
|
" def on_chain_end(self, outputs: Dict[str, Any], **kwargs) -> None:\n",
|
|
" print(f\"Chain ended, outputs: {outputs}\")\n",
|
|
"\n",
|
|
"\n",
|
|
"callbacks = [LoggingHandler()]\n",
|
|
"llm = ChatAnthropic(model=\"claude-3-sonnet-20240229\")\n",
|
|
"prompt = ChatPromptTemplate.from_template(\"What is 1 + {number}?\")\n",
|
|
"\n",
|
|
"chain = prompt | llm\n",
|
|
"\n",
|
|
"chain.invoke({\"number\": \"2\"}, config={\"callbacks\": callbacks})"
|
|
]
|
|
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "If there are already existing callbacks associated with a module, these will run in addition to any passed in at runtime."
   ]
  },
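  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a minimal sketch of that behavior (reusing the imports, `prompt`, and `LoggingHandler` from the example above; the `ConstructorHandler` name is hypothetical and defined here only for illustration), both handlers fire on the same run:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hypothetical second handler for this sketch, attached at construction\n",
    "# time rather than at runtime.\n",
    "class ConstructorHandler(BaseCallbackHandler):\n",
    "    def on_chat_model_start(\n",
    "        self, serialized: Dict[str, Any], messages: List[List[BaseMessage]], **kwargs\n",
    "    ) -> None:\n",
    "        print(\"Constructor handler: chat model started\")\n",
    "\n",
    "\n",
    "# One handler is bound to the model at construction time...\n",
    "llm_with_handler = ChatAnthropic(\n",
    "    model=\"claude-3-sonnet-20240229\", callbacks=[ConstructorHandler()]\n",
    ")\n",
    "chain_with_handler = prompt | llm_with_handler\n",
    "\n",
    "# ...and another is passed at runtime: both print when the chat model starts.\n",
    "chain_with_handler.invoke({\"number\": \"2\"}, config={\"callbacks\": [LoggingHandler()]})"
   ]
  },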
"## Next steps\n",
|
|
"\n",
|
|
"You've now learned how to pass callbacks at runtime.\n",
|
|
"\n",
|
|
"Next, check out the other how-to guides in this section, such as how to [pass callbacks into a module constructor](/docs/how_to/custom_callbacks)."
|
|
]
|
|
}
|
|
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}