From a042e804b45e4a00e2b3755bf142485d9b3a5c54 Mon Sep 17 00:00:00 2001
From: Asaf Joseph Gardin <39553475+Josephasafg@users.noreply.github.com>
Date: Tue, 21 May 2024 22:27:46 +0300
Subject: [PATCH] ai21: AI21 Jamba docs (#21978)

- Updated docs to have an example that uses Jamba instead of J2

---------

Co-authored-by: Asaf Gardin
Co-authored-by: Erick Friis
---
 docs/docs/integrations/chat/ai21.ipynb | 10 +---------
 libs/partners/ai21/README.md           |  4 +++-
 2 files changed, 4 insertions(+), 10 deletions(-)

diff --git a/docs/docs/integrations/chat/ai21.ipynb b/docs/docs/integrations/chat/ai21.ipynb
index 45f4a969bf5..fed515dd5a7 100644
--- a/docs/docs/integrations/chat/ai21.ipynb
+++ b/docs/docs/integrations/chat/ai21.ipynb
@@ -95,7 +95,7 @@
     "from langchain_ai21 import ChatAI21\n",
     "from langchain_core.prompts import ChatPromptTemplate\n",
     "\n",
-    "chat = ChatAI21(model=\"j2-ultra\")\n",
+    "chat = ChatAI21(model=\"jamba-instruct\")\n",
     "\n",
     "prompt = ChatPromptTemplate.from_messages(\n",
     "    [\n",
@@ -107,14 +107,6 @@
     "chain = prompt | chat\n",
     "chain.invoke({\"english_text\": \"Hello, how are you?\"})"
    ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c159a79f",
-   "metadata": {},
-   "outputs": [],
-   "source": []
   }
 ],
 "metadata": {
diff --git a/libs/partners/ai21/README.md b/libs/partners/ai21/README.md
index 508810a81ad..98b8815e980 100644
--- a/libs/partners/ai21/README.md
+++ b/libs/partners/ai21/README.md
@@ -27,11 +27,13 @@ Then initialize
 from langchain_core.messages import HumanMessage
 from langchain_ai21.chat_models import ChatAI21
 
-chat = ChatAI21(model="j2-ultra")
+chat = ChatAI21(model="jamba-instruct")
 messages = [HumanMessage(content="Hello from AI21")]
 chat.invoke(messages)
 ```
 
+For a list of supported models, see [this page](https://docs.ai21.com/reference/python-sdk#chat).
+
 ## LLMs
 
 You can use AI21's generative AI models as Langchain LLMs:
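
For reference, below is a minimal end-to-end sketch of the chat example these hunks update. It assumes `langchain-ai21` is installed and an AI21 API key is available in the environment; the notebook's prompt messages are truncated in the first hunk, so the system/human templates here are illustrative placeholders rather than the notebook's exact text.

```python
# Minimal sketch of the updated chat example, assuming `langchain-ai21` is
# installed and the AI21 API key is set in the environment.
from langchain_ai21 import ChatAI21
from langchain_core.prompts import ChatPromptTemplate

# Model name taken from the patch ("jamba-instruct" replaces "j2-ultra").
chat = ChatAI21(model="jamba-instruct")

# The prompt messages are cut off in the hunk above; this template is
# illustrative and only preserves the {english_text} input variable.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant."),
        ("human", "{english_text}"),
    ]
)

chain = prompt | chat
chain.invoke({"english_text": "Hello, how are you?"})
```

The README hunk shows the shorter form of the same call: construct `ChatAI21(model="jamba-instruct")` and pass a list of messages (e.g. a single `HumanMessage`) directly to `invoke`.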