diff --git a/docs/docs/integrations/callbacks/streamlit.md b/docs/docs/integrations/callbacks/streamlit.md
index 47b39cfd0b6..ad42d462d00 100644
--- a/docs/docs/integrations/callbacks/streamlit.md
+++ b/docs/docs/integrations/callbacks/streamlit.md
@@ -28,7 +28,7 @@ You can run `streamlit hello` to load a sample app and validate your install suc
 To create a `StreamlitCallbackHandler`, you just need to provide a parent container to render the output.

 ```python
-from langchain.callbacks import StreamlitCallbackHandler
+from langchain_community.callbacks import StreamlitCallbackHandler
 import streamlit as st

 st_callback = StreamlitCallbackHandler(st.container())
@@ -44,23 +44,26 @@ agent in your Streamlit app and simply pass the `StreamlitCallbackHandler` to `a
 thoughts and actions live in your app.

 ```python
-from langchain_openai import OpenAI
-from langchain.agents import AgentType, initialize_agent, load_tools
-from langchain_community.callbacks import StreamlitCallbackHandler
 import streamlit as st
+from langchain import hub
+from langchain.agents import AgentExecutor, create_react_agent, load_tools
+from langchain_community.callbacks import StreamlitCallbackHandler
+from langchain_openai import OpenAI

 llm = OpenAI(temperature=0, streaming=True)
 tools = load_tools(["ddg-search"])
-agent = initialize_agent(
-    tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
-)
+prompt = hub.pull("hwchase17/react")
+agent = create_react_agent(llm, tools, prompt)
+agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

 if prompt := st.chat_input():
     st.chat_message("user").write(prompt)
     with st.chat_message("assistant"):
         st_callback = StreamlitCallbackHandler(st.container())
-        response = agent.run(prompt, callbacks=[st_callback])
-        st.write(response)
+        response = agent_executor.invoke(
+            {"input": prompt}, {"callbacks": [st_callback]}
+        )
+        st.write(response["output"])
 ```

 **Note:** You will need to set `OPENAI_API_KEY` for the above app code to run successfully.
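
For reference, here is a minimal sketch of the full example app as it reads after this change, assembled from the added lines in the diff above. It assumes the `streamlit`, `langchain`, `langchain-community`, `langchain-openai`, `langchainhub`, and `duckduckgo-search` packages are installed and that `OPENAI_API_KEY` is set in the environment.

```python
import streamlit as st
from langchain import hub
from langchain.agents import AgentExecutor, create_react_agent, load_tools
from langchain_community.callbacks import StreamlitCallbackHandler
from langchain_openai import OpenAI

# Streaming LLM plus a DuckDuckGo search tool for the agent to call.
llm = OpenAI(temperature=0, streaming=True)
tools = load_tools(["ddg-search"])

# Pull the standard ReAct prompt from the LangChain Hub and build the agent.
prompt = hub.pull("hwchase17/react")
agent = create_react_agent(llm, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

# The walrus assignment reuses the name `prompt`; that is harmless here
# because the agent was already constructed from the hub prompt above.
if prompt := st.chat_input():
    st.chat_message("user").write(prompt)
    with st.chat_message("assistant"):
        # Render the agent's intermediate thoughts and actions into this container.
        st_callback = StreamlitCallbackHandler(st.container())
        response = agent_executor.invoke(
            {"input": prompt}, {"callbacks": [st_callback]}
        )
        st.write(response["output"])
```

Saved as, say, `app.py`, the sketch can be launched with `streamlit run app.py`; the callback renders the agent's intermediate steps inside the assistant message, and the final answer is written once the run completes.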