From 4b7a7ed4b1412aca07578bf52b188c06fdd43990 Mon Sep 17 00:00:00 2001 From: ccurme Date: Tue, 20 Aug 2024 08:59:11 -0400 Subject: [PATCH] v0.1 docs: add callout to OllamaFunctions docs (#25579) --- docs/docs/integrations/chat/ollama_functions.ipynb | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/docs/integrations/chat/ollama_functions.ipynb b/docs/docs/integrations/chat/ollama_functions.ipynb index ae4d0ac205e..6d0e20cad9c 100644 --- a/docs/docs/integrations/chat/ollama_functions.ipynb +++ b/docs/docs/integrations/chat/ollama_functions.ipynb @@ -15,6 +15,12 @@ "source": [ "# OllamaFunctions\n", "\n", + ":::{.callout-caution}\n", + "\n", + "This was an experimental wrapper that bolted on tool calling support to models that do not natively support it. The primary Ollama integration now supports tool calling, and should be used instead. See example usage in LangChain v0.2 documentation [here](https://python.langchain.com/v0.2/docs/integrations/chat/ollama/).\n", + "\n", + ":::\n", + "\n", "This notebook shows how to use an experimental wrapper around Ollama that gives it the same API as OpenAI Functions.\n", "\n", "Note that more powerful and capable models will perform better with complex schema and/or multiple functions. The examples below use llama3 and phi3 models.\n",