From 150d58304d53c71c4fa000c538cd35a640c96e92 Mon Sep 17 00:00:00 2001
From: Bagatur <22008038+baskaryan@users.noreply.github.com>
Date: Thu, 9 Nov 2023 08:04:51 -0800
Subject: [PATCH] update oai cookbooks (#13135)

---
 cookbook/extraction_openai_tools.ipynb | 23 ++++++-----
 cookbook/openai_v1_cookbook.ipynb      | 56 +++++++++++++++++++++++---
 2 files changed, 62 insertions(+), 17 deletions(-)

diff --git a/cookbook/extraction_openai_tools.ipynb b/cookbook/extraction_openai_tools.ipynb
index df4229f5434..e45a81ccba2 100644
--- a/cookbook/extraction_openai_tools.ipynb
+++ b/cookbook/extraction_openai_tools.ipynb
@@ -5,7 +5,7 @@
    "id": "2def22ea",
    "metadata": {},
    "source": [
-    "# Extration with OpenAI Tools\n",
+    "# Extraction with OpenAI Tools\n",
     "\n",
     "Performing extraction has never been easier! OpenAI's tool calling ability is the perfect thing to use as it allows for extracting multiple different elements from text that are different types. \n",
     "\n",
@@ -144,14 +144,16 @@
    "metadata": {},
    "source": [
     "```python\n",
-    "from langchain.output_parsers import PydanticToolsParser\n",
-    "from langchain.utils.openai_functions import convert_pydantic_to_openai_function\n",
-    "from langchain.schema.runnable import Runnable\n",
     "from typing import Union, List, Type, Optional\n",
+    "\n",
+    "from langchain.output_parsers.openai_tools import PydanticToolsParser\n",
+    "from langchain.utils.openai_functions import convert_pydantic_to_openai_tool\n",
+    "from langchain.schema.runnable import Runnable\n",
     "from langchain.pydantic_v1 import BaseModel\n",
     "from langchain.prompts import ChatPromptTemplate\n",
     "from langchain.schema.messages import SystemMessage\n",
     "from langchain.schema.language_model import BaseLanguageModel\n",
+    "\n",
     "_EXTRACTION_TEMPLATE = \"\"\"Extract and save the relevant entities mentioned \\\n",
     "in the following passage together with their properties.\n",
     "\n",
@@ -161,16 +163,15 @@
     "def create_extraction_chain_pydantic(\n",
     "    pydantic_schemas: Union[List[Type[BaseModel]], Type[BaseModel]],\n",
     "    llm: BaseLanguageModel,\n",
-    "    system_message: Optional[str] = _EXTRACTION_TEMPLATE,\n",
+    "    system_message: str = _EXTRACTION_TEMPLATE,\n",
     ") -> Runnable:\n",
     "    if not isinstance(pydantic_schemas, list):\n",
     "        pydantic_schemas = [pydantic_schemas]\n",
-    "    prompt = ChatPromptTemplate.from_messages({\n",
-    "        (\"system\", _EXTRACTION_TEMPLATE),\n",
+    "    prompt = ChatPromptTemplate.from_messages([\n",
+    "        (\"system\", system_message),\n",
     "        (\"user\", \"{input}\")\n",
-    "    })\n",
-    "    functions = [convert_pydantic_to_openai_function(p) for p in pydantic_schemas]\n",
-    "    tools = [{\"type\": \"function\", \"function\": d} for d in functions]\n",
+    "    ])\n",
+    "    tools = [convert_pydantic_to_openai_tool(p) for p in pydantic_schemas]\n",
     "    model = llm.bind(tools=tools)\n",
     "    chain = prompt | model | PydanticToolsParser(tools=pydantic_schemas)\n",
     "    return chain\n",
@@ -202,7 +203,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.1"
+   "version": "3.9.1"
   }
  },
  "nbformat": 4,
diff --git a/cookbook/openai_v1_cookbook.ipynb b/cookbook/openai_v1_cookbook.ipynb
index 518e1b8c12d..da1928feaf9 100644
--- a/cookbook/openai_v1_cookbook.ipynb
+++ b/cookbook/openai_v1_cookbook.ipynb
@@ -17,7 +17,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# need openai>=1.1.0, langchain>=0.0.332, langchain-experimental>=0.0.39\n",
+    "# need openai>=1.1.0, langchain>=0.0.333, langchain-experimental>=0.0.39\n",
     "!pip install -U openai langchain langchain-experimental"
    ]
   },
@@ -413,12 +413,56 @@
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "52b48493-e4fe-4ab9-97cd-e65bf3996e30",
+   "cell_type": "markdown",
+   "id": "49944887-3972-497e-8da2-6d32d44345a9",
    "metadata": {},
-   "outputs": [],
-   "source": []
+   "source": [
+    "## Tools\n",
+    "\n",
+    "Use tools for parallel function calling."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "916292d8-0f89-40a6-af1c-5a1122327de8",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[GetCurrentWeather(location='New York, NY', unit='fahrenheit'),\n",
+       " GetCurrentWeather(location='Los Angeles, CA', unit='fahrenheit'),\n",
+       " GetCurrentWeather(location='San Francisco, CA', unit='fahrenheit')]"
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from typing import Literal\n",
+    "\n",
+    "from langchain.output_parsers.openai_tools import PydanticToolsParser\n",
+    "from langchain.utils.openai_functions import convert_pydantic_to_openai_tool\n",
+    "from langchain.prompts import ChatPromptTemplate\n",
+    "from langchain.pydantic_v1 import BaseModel, Field\n",
+    "\n",
+    "class GetCurrentWeather(BaseModel):\n",
+    "    \"\"\"Get the current weather in a location.\"\"\"\n",
+    "    location: str = Field(description=\"The city and state, e.g. San Francisco, CA\")\n",
+    "    unit: Literal[\"celsius\", \"fahrenheit\"] = Field(default=\"fahrenheit\", description=\"The temperature unit, default to fahrenheit\")\n",
+    "    \n",
+    "prompt = ChatPromptTemplate.from_messages([\n",
+    "    (\"system\", \"You are a helpful assistant\"),\n",
+    "    (\"user\", \"{input}\")\n",
+    "])\n",
+    "model = ChatOpenAI(model=\"gpt-3.5-turbo-1106\").bind(tools=[convert_pydantic_to_openai_tool(GetCurrentWeather)])\n",
+    "chain = prompt | model | PydanticToolsParser(tools=[GetCurrentWeather])\n",
+    "\n",
+    "chain.invoke({\"input\": \"what's the weather in NYC, LA, and SF\"})"
+   ]
   }
  ],
 "metadata": {
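
For readers following the patch, here is a minimal usage sketch of the updated `create_extraction_chain_pydantic` helper defined in the extraction notebook cell above. It is not part of the patch: the `Person` schema, the example passage, and the `gpt-3.5-turbo-1106` model choice are illustrative assumptions, and the helper itself is assumed to already be defined in scope (e.g. by running the cell shown in the diff).

```python
# Sketch only: assumes create_extraction_chain_pydantic (from the patched
# notebook cell above) is already defined in this session.
from typing import Optional

from langchain.chat_models import ChatOpenAI
from langchain.pydantic_v1 import BaseModel


class Person(BaseModel):
    """Hypothetical extraction schema: a person's name and optional age."""

    name: str
    age: Optional[int] = None


# A tool-calling-capable chat model; gpt-3.5-turbo-1106 matches the cookbook examples.
model = ChatOpenAI(model="gpt-3.5-turbo-1106", temperature=0)
chain = create_extraction_chain_pydantic(Person, model)

# Returns a list of Person instances extracted from the passage,
# one per entity the model identifies.
chain.invoke({"input": "jane is 2 and bob is 3"})
```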