From 414bddd5f0558117d66fd046d7e76b1d1fe82850 Mon Sep 17 00:00:00 2001
From: ggeutzzang
Date: Wed, 13 Dec 2023 10:31:08 +0900
Subject: [PATCH] DOC: model update in 'Using OpenAI Functions' docs (#14486)

- **Description:** I updated the OpenAI functions docs to use the latest model (e.g. gpt-3.5-turbo-1106):
  https://python.langchain.com/docs/modules/chains/how_to/openai_functions

  The reasoning is as follows: after reviewing the official OpenAI function-calling guide at
  https://platform.openai.com/docs/guides/function-calling, the following information was noted:

  > "The latest models (gpt-3.5-turbo-1106 and gpt-4-1106-preview) have been trained to both detect when a function should be called (depending on the input) and to respond with JSON that adheres to the function signature more closely than previous models. With this capability also comes potential risks. We strongly recommend building in user confirmation flows before taking actions that impact the world on behalf of users (sending an email, posting something online, making a purchase, etc)."

CC: @efriis
---
 docs/docs/modules/chains/how_to/openai_functions.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/docs/modules/chains/how_to/openai_functions.ipynb b/docs/docs/modules/chains/how_to/openai_functions.ipynb
index f5d4a5cd9d2..37478784f84 100644
--- a/docs/docs/modules/chains/how_to/openai_functions.ipynb
+++ b/docs/docs/modules/chains/how_to/openai_functions.ipynb
@@ -85,8 +85,8 @@
     }
    ],
    "source": [
-    "# If we pass in a model explicitly, we need to make sure it supports the OpenAI function-calling API.\n",
-    "llm = ChatOpenAI(model=\"gpt-4\", temperature=0)\n",
+    "# For better results with the OpenAI function-calling API, it is recommended to explicitly pass the latest model.\n",
+    "llm = ChatOpenAI(model=\"gpt-3.5-turbo-1106\", temperature=0)\n",
     "prompt = ChatPromptTemplate.from_messages(\n",
     "    [\n",
     "        (\n",
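
For reference, a minimal sketch of how the surrounding notebook cell might read after this change. Only the `ChatOpenAI(model="gpt-3.5-turbo-1106", temperature=0)` line comes from this diff; the `Person` schema, the prompt wording, and the use of `create_structured_output_chain` are assumptions modeled on the same docs page of that era, not part of this patch.

```python
# Hedged sketch of the updated docs cell (langchain 0.0.x era APIs).
# Assumptions: the Person schema, prompt text, and create_structured_output_chain
# helper are illustrative; only the ChatOpenAI model argument comes from the diff.
from langchain.chains.openai_functions import create_structured_output_chain
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.pydantic_v1 import BaseModel, Field


class Person(BaseModel):
    """Identifying information about a person."""

    name: str = Field(..., description="The person's name")
    age: int = Field(..., description="The person's age")


# For better results with the OpenAI function-calling API, explicitly pass the latest model.
llm = ChatOpenAI(model="gpt-3.5-turbo-1106", temperature=0)
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a world class algorithm for extracting information in structured formats."),
        ("human", "Use the given format to extract information from the following input: {input}"),
    ]
)

# Build a chain that asks the model to call a function matching the Person schema.
chain = create_structured_output_chain(Person, llm, prompt)
print(chain.run("Sally is 13"))
```

The gpt-3.5-turbo-1106 and gpt-4-1106-preview models are trained to emit JSON that adheres to the function signature more closely, which is why the docs cell benefits from pinning one of them rather than an older model.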