ai21: AI21 Jamba docs (#21978)

- Updated the docs example to use Jamba instead of J2

---------

Co-authored-by: Asaf Gardin <asafg@ai21.com>
Co-authored-by: Erick Friis <erick@langchain.dev>
Asaf Joseph Gardin 2024-05-21 22:27:46 +03:00 committed by GitHub
parent 4cf523949a
commit a042e804b4
2 changed files with 4 additions and 10 deletions

View File

@@ -95,7 +95,7 @@
 "from langchain_ai21 import ChatAI21\n",
 "from langchain_core.prompts import ChatPromptTemplate\n",
 "\n",
-"chat = ChatAI21(model=\"j2-ultra\")\n",
+"chat = ChatAI21(model=\"jamba-instruct\")\n",
 "\n",
 "prompt = ChatPromptTemplate.from_messages(\n",
 " [\n",
@@ -107,14 +107,6 @@
 "chain = prompt | chat\n",
 "chain.invoke({\"english_text\": \"Hello, how are you?\"})"
 ]
-},
-{
- "cell_type": "code",
- "execution_count": null,
- "id": "c159a79f",
- "metadata": {},
- "outputs": [],
- "source": []
 }
 ],
 "metadata": {

View File

@@ -27,11 +27,13 @@ Then initialize
 from langchain_core.messages import HumanMessage
 from langchain_ai21.chat_models import ChatAI21
-chat = ChatAI21(model="j2-ultra")
+chat = ChatAI21(model="jamba-instruct")
 messages = [HumanMessage(content="Hello from AI21")]
 chat.invoke(messages)
 ```
+For a list of the supported models, see [this page](https://docs.ai21.com/reference/python-sdk#chat)
 ## LLMs
 You can use AI21's generative AI models as Langchain LLMs:
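The LLMs section shown as context here is not modified by this commit. For orientation, a minimal sketch of that usage might look like the following, assuming the AI21LLM class exported by langchain_ai21 and a J2-family completion model name such as "j2-ultra"; the README's own example (outside the hunks shown) may differ.

```python
# Minimal sketch of using an AI21 model as a LangChain LLM.
# AI21LLM and the "j2-ultra" model name are assumptions based on the
# surrounding README section; the actual example may use other values.
from langchain_ai21 import AI21LLM

llm = AI21LLM(model="j2-ultra")  # requires AI21_API_KEY in the environment
print(llm.invoke("Tell me a short fact about AI21 Labs."))
```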