diff --git a/docs/docs/integrations/chat/cerebras.ipynb b/docs/docs/integrations/chat/cerebras.ipynb
index e6ad0cf8938..1713681f010 100644
--- a/docs/docs/integrations/chat/cerebras.ipynb
+++ b/docs/docs/integrations/chat/cerebras.ipynb
@@ -139,7 +139,7 @@
     "from langchain_cerebras import ChatCerebras\n",
     "\n",
     "llm = ChatCerebras(\n",
-    "    model=\"llama3.1-70b\",\n",
+    "    model=\"llama-3.3-70b\",\n",
     "    # other params...\n",
     ")"
    ]
@@ -215,7 +215,7 @@
     "from langchain_core.prompts import ChatPromptTemplate\n",
     "\n",
     "llm = ChatCerebras(\n",
-    "    model=\"llama3.1-70b\",\n",
+    "    model=\"llama-3.3-70b\",\n",
     "    # other params...\n",
     ")\n",
     "\n",
@@ -280,7 +280,7 @@
     "from langchain_core.prompts import ChatPromptTemplate\n",
     "\n",
     "llm = ChatCerebras(\n",
-    "    model=\"llama3.1-70b\",\n",
+    "    model=\"llama-3.3-70b\",\n",
     "    # other params...\n",
     ")\n",
     "\n",
@@ -324,7 +324,7 @@
     "from langchain_core.prompts import ChatPromptTemplate\n",
     "\n",
     "llm = ChatCerebras(\n",
-    "    model=\"llama3.1-70b\",\n",
+    "    model=\"llama-3.3-70b\",\n",
     "    # other params...\n",
     ")\n",
     "\n",
@@ -371,7 +371,7 @@
     "from langchain_core.prompts import ChatPromptTemplate\n",
     "\n",
     "llm = ChatCerebras(\n",
-    "    model=\"llama3.1-70b\",\n",
+    "    model=\"llama-3.3-70b\",\n",
     "    # other params...\n",
     ")\n",
     "\n",