diff --git a/docs/docs/integrations/tools/playwright.ipynb b/docs/docs/integrations/tools/playwright.ipynb
index e09f4e79cd2..ed0e0a58891 100644
--- a/docs/docs/integrations/tools/playwright.ipynb
+++ b/docs/docs/integrations/tools/playwright.ipynb
@@ -231,39 +231,25 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": null,
    "metadata": {
     "tags": []
    },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/Users/isaachershenson/.pyenv/versions/3.11.9/lib/python3.11/site-packages/langchain_core/_api/deprecation.py:139: LangChainDeprecationWarning: The function `initialize_agent` was deprecated in LangChain 0.1.0 and will be removed in 0.3.0. Use Use new agent constructor methods like create_react_agent, create_json_agent, create_structured_chat_agent, etc. instead.\n",
-      "  warn_deprecated(\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
-    "from langchain.agents import AgentType, initialize_agent\n",
     "from langchain_anthropic import ChatAnthropic\n",
+    "from langgraph.prebuilt import create_react_agent\n",
     "\n",
     "llm = ChatAnthropic(\n",
     "    model_name=\"claude-3-haiku-20240307\", temperature=0\n",
     ") # or any other LLM, e.g., ChatOpenAI(), OpenAI()\n",
     "\n",
-    "agent_chain = initialize_agent(\n",
-    "    tools,\n",
-    "    llm,\n",
-    "    agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,\n",
-    "    verbose=True,\n",
-    ")"
+    "agent_chain = create_react_agent(model=llm, tools=tools)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": null,
    "metadata": {
     "tags": []
    },
@@ -298,7 +284,7 @@
       "```\n",
       "\n",
       "\u001b[0m\n",
-      "Observation: \u001b[31;1m\u001b[1;3mLangChain We value your privacy We use cookies to analyze our traffic. By clicking \"Accept All\", you consent to our use of cookies. Privacy Policy Customize Reject All Accept All Customize Consent Preferences We may use cookies to help you navigate efficiently and perform certain functions. You will find detailed information about all cookies under each consent category below. The cookies that are categorized as \"Necessary\" are stored on your browser as they are essential for enabling the basic functionalities of the site.... Show more Necessary Always Active Necessary cookies are required to enable the basic features of this site, such as providing secure log-in or adjusting your consent preferences. These cookies do not store any personally identifiable data. Functional Functional cookies help perform certain functionalities like sharing the content of the website on social media platforms, collecting feedback, and other third-party features. Analytics Analytical cookies are used to understand how visitors interact with the website. These cookies help provide information on metrics such as the number of visitors, bounce rate, traffic source, etc. Performance Performance cookies are used to understand and analyze the key performance indexes of the website which helps in delivering a better user experience for the visitors. Advertisement Advertisement cookies are used to provide visitors with customized advertisements based on the pages you visited previously and to analyze the effectiveness of the ad campaigns. Uncategorized Other uncategorized cookies are those that are being analyzed and have not been classified into a category as yet. 
Reject All Save My Preferences Accept All Products LangChain LangSmith LangGraph Methods Retrieval Agents Evaluation Resources Blog Case Studies Use Case Inspiration Experts Changelog Docs LangChain Docs LangSmith Docs Company About Careers Pricing Get a demo Sign up LangChain’s suite of products supports developers along each step of the LLM application lifecycle. Applications that can reason. Powered by LangChain. Get a demo Sign up for free From startups to global enterprises, ambitious builders choose LangChain products. Build LangChain is a framework to build with LLMs by chaining interoperable components. LangGraph is the framework for building controllable agentic workflows. Run Deploy your LLM applications at scale with LangGraph Cloud, our infrastructure purpose-built for agents. Manage Debug, collaborate, test, and monitor your LLM app in LangSmith - whether it's built with a LangChain framework or not. Build your app with LangChain Build context-aware, reasoning applications with LangChain’s flexible framework that leverages your company’s data and APIs. Future-proof your application by making vendor optionality part of your LLM infrastructure design. Learn more about LangChain Run at scale with LangGraph Cloud Deploy your LangGraph app with LangGraph Cloud for fault-tolerant scalability - including support for async background jobs, built-in persistence, and distributed task queues. Learn more about LangGraph Manage LLM performance with LangSmith Ship faster with LangSmith’s debug, test, deploy, and monitoring workflows. Don’t rely on “vibes” – add engineering rigor to your LLM-development workflow, whether you’re building with LangChain or not. Learn more about LangSmith Hear from our happy customers LangChain, LangGraph, and LangSmith help teams of all sizes, across all industries - from ambitious startups to established enterprises. “LangSmith helped us improve the accuracy and performance of Retool’s fine-tuned models. Not only did we deliver a better product by iterating with LangSmith, but we’re shipping new AI features to our users in a fraction of the time it would have taken without it.” Jamie Cuffe Head of Self-Serve and New Products “By combining the benefits of LangSmith and standing on the shoulders of a gigantic open-source community, we’re able to identify the right approaches of using LLMs in an enterprise-setting faster.” Yusuke Kaji General Manager of AI “Working with LangChain and LangSmith on the Elastic AI Assistant had a significant positive impact on the overall pace and quality of the development and shipping experience. We couldn’t have achieved  the product experience delivered to our customers without LangChain, and we couldn’t have done it at the same pace without LangSmith.” James Spiteri Director of Security Products “As soon as we heard about LangSmith, we moved our entire development stack onto it. We could have built evaluation, testing and monitoring tools in house, but with LangSmith it took us 10x less time to get a 1000x better tool.” Jose Peña Senior Manager The reference architecture enterprises adopt for success. LangChain’s suite of products can be used independently or stacked together for multiplicative impact – guiding you through building, running, and managing your LLM apps. 15M+ Monthly Downloads 100K+ Apps Powered 75K+ GitHub Stars 3K+ Contributors The biggest developer community in GenAI Learn alongside the 1M+ developers who are pushing the industry forward. 
Explore LangChain Get started with the LangSmith platform today Get a demo Sign up for free Teams building with LangChain are driving operational efficiency, increasing discovery & personalization, and delivering premium products that generate revenue. Discover Use Cases Get inspired by companies who have done it. Financial Services FinTech Technology LangSmith is the enterprise DevOps platform built for LLMs. Explore LangSmith Gain visibility to make trade offs between cost, latency, and quality. Increase developer productivity. Eliminate manual, error-prone testing. Reduce hallucinations and improve reliability. Enterprise deployment options to keep data secure. Ready to start shipping 
", + "Observation: \u001b[31;1m\u001b[1;3mLangChain We value your privacy We use cookies to analyze our traffic. By clicking \"Accept All\", you consent to our use of cookies. Privacy Policy Customize Reject All Accept All Customize Consent Preferences We may use cookies to help you navigate efficiently and perform certain functions. You will find detailed information about all cookies under each consent category below. The cookies that are categorized as \"Necessary\" are stored on your browser as they are essential for enabling the basic functionalities of the site.... Show more Necessary Always Active Necessary cookies are required to enable the basic features of this site, such as providing secure log-in or adjusting your consent preferences. These cookies do not store any personally identifiable data. Functional Functional cookies help perform certain functionalities like sharing the content of the website on social media platforms, collecting feedback, and other third-party features. Analytics Analytical cookies are used to understand how visitors interact with the website. These cookies help provide information on metrics such as the number of visitors, bounce rate, traffic source, etc. Performance Performance cookies are used to understand and analyze the key performance indexes of the website which helps in delivering a better user experience for the visitors. Advertisement Advertisement cookies are used to provide visitors with customized advertisements based on the pages you visited previously and to analyze the effectiveness of the ad campaigns. Uncategorized Other uncategorized cookies are those that are being analyzed and have not been classified into a category as yet. Reject All Save My Preferences Accept All Products LangChain LangSmith LangGraph Methods Retrieval Agents Evaluation Resources Blog Case Studies Use Case Inspiration Experts Changelog Docs LangChain Docs LangSmith Docs Company About Careers Pricing Get a demo Sign up LangChain’s suite of products supports developers along each step of the LLM application lifecycle. Applications that can reason. Powered by LangChain. Get a demo Sign up for free From startups to global enterprises, ambitious builders choose LangChain products. Build LangChain is a framework to build with LLMs by chaining interoperable components. LangGraph is the framework for building controllable agentic workflows. Run Deploy your LLM applications at scale with LangGraph Cloud, our infrastructure purpose-built for agents. Manage Debug, collaborate, test, and monitor your LLM app in LangSmith - whether it's built with a LangChain framework or not. Build your app with LangChain Build context-aware, reasoning applications with LangChain’s flexible framework that leverages your company’s data and APIs. Future-proof your application by making vendor optionality part of your LLM infrastructure design. Learn more about LangChain Run at scale with LangGraph Cloud Deploy your LangGraph app with LangGraph Cloud for fault-tolerant scalability - including support for async background jobs, built-in persistence, and distributed task queues. Learn more about LangGraph Manage LLM performance with LangSmith Ship faster with LangSmith’s debug, test, deploy, and monitoring workflows. Don’t rely on “vibes” – add engineering rigor to your LLM-development workflow, whether you’re building with LangChain or not. 
Learn more about LangSmith Hear from our happy customers LangChain, LangGraph, and LangSmith help teams of all sizes, across all industries - from ambitious startups to established enterprises. “LangSmith helped us improve the accuracy and performance of Retool’s fine-tuned models. Not only did we deliver a better product by iterating with LangSmith, but we’re shipping new AI features to our users in a fraction of the time it would have taken without it.” Jamie Cuffe Head of Self-Serve and New Products “By combining the benefits of LangSmith and standing on the shoulders of a gigantic open-source community, we’re able to identify the right approaches of using LLMs in an enterprise-setting faster.” Yusuke Kaji General Manager of AI “Working with LangChain and LangSmith on the Elastic AI Assistant had a significant positive impact on the overall pace and quality of the development and shipping experience. We couldn’t have achieved  the product experience delivered to our customers without LangChain, and we couldn’t have done it at the same pace without LangSmith.” James Spiteri Director of Security Products “As soon as we heard about LangSmith, we moved our entire development stack onto it. We could have built evaluation, testing and monitoring tools in house, but with LangSmith it took us 10x less time to get a 1000x better tool.” Jose Peña Senior Manager The reference architecture enterprises adopt for success. LangChain’s suite of products can be used independently or stacked together for multiplicative impact – guiding you through building, running, and managing your LLM apps. 15M+ Monthly Downloads 100K+ Apps Powered 75K+ GitHub Stars 3K+ Contributors The biggest developer community in GenAI Learn alongside the 1M+ developers who are pushing the industry forward. Explore LangChain Get started with the LangSmith platform today Get a demo Sign up for free Teams building with LangChain are driving operational efficiency, increasing discovery & personalization, and delivering premium products that generate revenue. Discover Use Cases Get inspired by companies who have done it. Financial Services FinTech Technology LangSmith is the enterprise DevOps platform built for LLMs. Explore LangSmith Gain visibility to make trade offs between cost, latency, and quality. Increase developer productivity. Eliminate manual, error-prone testing. Reduce hallucinations and improve reliability. Enterprise deployment options to keep data secure. Ready to start shipping 
\n", "reliable GenAI apps faster? Get started with LangChain, LangGraph, and LangSmith to enhance your LLM app development, from prototype to production. Get a demo Sign up for free Products LangChain LangSmith LangGraph Agents Evaluation Retrieval Resources Python Docs JS/TS Docs GitHub Integrations Templates Changelog LangSmith Trust Portal Company About Blog Twitter LinkedIn YouTube Community Marketing Assets Sign up for our newsletter to stay up to date Thank you! Your submission has been received! Oops! Something went wrong while submitting the form. All systems operational Privacy Policy Terms of Service\u001b[0m\n", "Thought:\u001b[32;1m\u001b[1;3mBased on the text extracted from the langchain.com website, the main headers I can see are:\n", "\n", @@ -368,7 +354,9 @@ } ], "source": [ - "result = await agent_chain.arun(\"What are the headers on langchain.com?\")\n", + "result = await agent_chain.ainvoke(\n", + " {\"messages\": [(\"user\", \"What are the headers on langchain.com?\")]}\n", + ")\n", "print(result)" ] }