{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "593f7553-7038-498e-96d4-8255e5ce34f0",
   "metadata": {},
   "source": [
    "# Async API for Chain\n",
    "\n",
    "LangChain provides async support for Chains by leveraging the [asyncio](https://docs.python.org/3/library/asyncio.html) library.\n",
    "\n",
    "Async methods are currently supported in `LLMChain` (through `arun`, `apredict`, and `acall`), `LLMMathChain` (through `arun` and `acall`), `ChatVectorDBChain`, and [QA chains](../indexes/chain_examples/question_answering.html). Async support for other chains is on the roadmap."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "c19c736e-ca74-4726-bb77-0a849bcc2960",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "\n",
      "BrightSmile Toothpaste Company\n",
      "\n",
      "\n",
      "BrightSmile Toothpaste Co.\n",
      "\n",
      "\n",
      "BrightSmile Toothpaste\n",
      "\n",
      "\n",
      "Gleaming Smile Inc.\n",
      "\n",
      "\n",
      "SparkleSmile Toothpaste\n",
      "\u001B[1mConcurrent executed in 1.54 seconds.\u001B[0m\n",
      "\n",
      "\n",
      "BrightSmile Toothpaste Co.\n",
      "\n",
      "\n",
      "MintyFresh Toothpaste Co.\n",
      "\n",
      "\n",
      "SparkleSmile Toothpaste.\n",
      "\n",
      "\n",
      "Pearly Whites Toothpaste Co.\n",
      "\n",
      "\n",
      "BrightSmile Toothpaste.\n",
      "\u001B[1mSerial executed in 6.38 seconds.\u001B[0m\n"
     ]
    }
   ],
   "source": [
    "import asyncio\n",
    "import time\n",
    "\n",
    "from langchain.llms import OpenAI\n",
    "from langchain.prompts import PromptTemplate\n",
    "from langchain.chains import LLMChain\n",
    "\n",
    "\n",
    "def generate_serially():\n",
    "    llm = OpenAI(temperature=0.9)\n",
    "    prompt = PromptTemplate(\n",
    "        input_variables=[\"product\"],\n",
    "        template=\"What is a good name for a company that makes {product}?\",\n",
    "    )\n",
    "    chain = LLMChain(llm=llm, prompt=prompt)\n",
    "    for _ in range(5):\n",
    "        resp = chain.run(product=\"toothpaste\")\n",
    "        print(resp)\n",
    "\n",
    "\n",
    "async def async_generate(chain):\n",
    "    resp = await chain.arun(product=\"toothpaste\")\n",
    "    print(resp)\n",
    "\n",
    "\n",
    "async def generate_concurrently():\n",
    "    llm = OpenAI(temperature=0.9)\n",
    "    prompt = PromptTemplate(\n",
    "        input_variables=[\"product\"],\n",
    "        template=\"What is a good name for a company that makes {product}?\",\n",
    "    )\n",
    "    chain = LLMChain(llm=llm, prompt=prompt)\n",
    "    tasks = [async_generate(chain) for _ in range(5)]\n",
    "    await asyncio.gather(*tasks)\n",
    "\n",
    "s = time.perf_counter()\n",
    "# If running this outside of Jupyter, use asyncio.run(generate_concurrently())\n",
    "await generate_concurrently()\n",
    "elapsed = time.perf_counter() - s\n",
    "print('\\033[1m' + f\"Concurrent executed in {elapsed:0.2f} seconds.\" + '\\033[0m')\n",
    "\n",
    "s = time.perf_counter()\n",
    "generate_serially()\n",
    "elapsed = time.perf_counter() - s\n",
    "print('\\033[1m' + f\"Serial executed in {elapsed:0.2f} seconds.\" + '\\033[0m')"
   ]
  },
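  {
   "cell_type": "markdown",
   "id": "apredict-acall-note",
   "metadata": {},
   "source": [
    "As a minimal, illustrative sketch (not part of the timing demo above), the cell below exercises the other `LLMChain` async entry points mentioned earlier, `apredict` and `acall`, reusing the same prompt; outputs will vary from run to run."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "apredict-acall-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative sketch: the same chain setup, driven through `apredict` and `acall`.\n",
    "llm = OpenAI(temperature=0.9)\n",
    "prompt = PromptTemplate(\n",
    "    input_variables=[\"product\"],\n",
    "    template=\"What is a good name for a company that makes {product}?\",\n",
    ")\n",
    "chain = LLMChain(llm=llm, prompt=prompt)\n",
    "\n",
    "# `apredict` mirrors `predict`: keyword arguments in, generated string out.\n",
    "print(await chain.apredict(product=\"toothpaste\"))\n",
    "\n",
    "# `acall` mirrors `__call__`: an input dict in, an output dict out\n",
    "# (for LLMChain the generated text is under the \"text\" key).\n",
    "result = await chain.acall({\"product\": \"toothpaste\"})\n",
    "print(result[\"text\"])"
   ]
  }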
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}