docs: Updated Together integration docs (#21388)
**Description:** Updated the Together integration docs by leading with the streaming example, explicitly specifying a model to show users how to do that, and updating the sections to more closely match other integrations.
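For context, the pattern the updated notebook now leads with (streaming from an explicitly chosen model) looks roughly like the sketch below. It is assembled from the cells changed in this commit and assumes `TOGETHER_API_KEY` is already set in the environment:

```python
from langchain_together import ChatTogether

# pick one of Together AI's 50+ hosted models; the notebook uses Llama-3 70B chat
chat = ChatTogether(model="meta-llama/Llama-3-70b-chat-hf")

# stream the response back token by token
for chunk in chat.stream("Tell me fun things to do in NYC"):
    print(chunk.content, end="", flush=True)
```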
@@ -7,20 +7,18 @@
    "source": [
     "# Together AI\n",
     "\n",
-    "> The Together API makes it easy to query and fine-tune leading open-source models with a couple lines of code. We have integrated the world’s leading open-source models, including Llama-3, Mixtral, DBRX, Stable Diffusion XL, and more. Read more: https://together.ai\n",
+    "[Together AI](https://www.together.ai/) offers an API to query [50+ leading open-source models](https://docs.together.ai/docs/inference-models) in a couple lines of code.\n",
     "\n",
-    "To use, you'll need an API key which you can find here:\n",
-    "https://api.together.ai/settings/api-keys. This can be passed in as init param\n",
-    "``together_api_key`` or set as environment variable ``TOGETHER_API_KEY``.\n",
-    "\n",
-    "Together API reference: https://docs.together.ai"
+    "This example goes over how to use LangChain to interact with Together AI models."
    ]
   },
   {
    "cell_type": "markdown",
    "id": "1c47fc36",
    "metadata": {},
-   "source": []
+   "source": [
+    "## Installation"
+   ]
   },
   {
    "cell_type": "code",
@@ -29,7 +27,27 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "%pip install --upgrade --quiet langchain-together"
+    "%pip install --upgrade langchain-together"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "id": "89883202",
+   "metadata": {},
+   "source": [
+    "## Environment\n",
+    "\n",
+    "To use Together AI, you'll need an API key which you can find here:\n",
+    "https://api.together.ai/settings/api-keys. This can be passed in as an init param\n",
+    "``together_api_key`` or set as environment variable ``TOGETHER_API_KEY``.\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "8304b4d9",
+   "metadata": {},
+   "source": [
+    "## Example"
+   ]
+  },
   {
@@ -39,19 +57,22 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Running chat completions with Together AI\n",
+    "# Querying chat models with Together AI\n",
     "\n",
-    "from langchain_core.prompts import ChatPromptTemplate\n",
     "from langchain_together import ChatTogether\n",
     "\n",
-    "chat = ChatTogether()\n",
+    "# choose from our 50+ models here: https://docs.together.ai/docs/inference-models\n",
+    "chat = ChatTogether(\n",
+    "    # together_api_key=\"YOUR_API_KEY\",\n",
+    "    model=\"meta-llama/Llama-3-70b-chat-hf\",\n",
+    ")\n",
     "\n",
-    "# using chat invoke\n",
-    "chat.invoke(\"Tell me fun things to do in NYC\")\n",
-    "\n",
-    "# using chat stream\n",
+    "# stream the response back from the model\n",
     "for m in chat.stream(\"Tell me fun things to do in NYC\"):\n",
-    "    print(m)"
+    "    print(m.content, end=\"\", flush=True)\n",
+    "\n",
+    "# if you don't want to do streaming, you can use the invoke method\n",
+    "# chat.invoke(\"Tell me fun things to do in NYC\")"
    ]
   },
   {
@@ -61,7 +82,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# Running completions with Together AI\n",
+    "# Querying code and language models with Together AI\n",
     "\n",
     "from langchain_together import Together\n",
     "\n",
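The new Environment cell in the diff above describes two ways to supply credentials. A minimal sketch of both, reusing the notebook's chat model and a placeholder key:

```python
import os

from langchain_together import ChatTogether

# option 1: set the environment variable the integration reads by default
os.environ["TOGETHER_API_KEY"] = "YOUR_API_KEY"  # placeholder value
chat = ChatTogether(model="meta-llama/Llama-3-70b-chat-hf")

# option 2: pass the key explicitly as an init param
chat = ChatTogether(
    together_api_key="YOUR_API_KEY",  # placeholder value
    model="meta-llama/Llama-3-70b-chat-hf",
)
```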
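The final hunk is truncated before the body of the completions cell. A hedged sketch of how a completion call with the `Together` class might look; the model name below is illustrative and not taken from the diff:

```python
from langchain_together import Together

# Together wraps Together AI's completion endpoint; any hosted completion model works here
llm = Together(
    model="codellama/CodeLlama-70b-Python-hf",  # illustrative model choice
    # together_api_key="YOUR_API_KEY",
)

print(llm.invoke("def bubble_sort():"))
```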