docs: fireworks fixes (#18056)

Erick Friis 2024-02-23 15:58:53 -08:00 committed by GitHub
parent a05fb19f42
commit 1a3383fba1
3 changed files with 40 additions and 70 deletions

View File

@@ -28,7 +28,6 @@
 "id": "4a7c795e",
 "metadata": {},
 "source": [
-"%pip install langchain\n",
 "%pip install langchain-fireworks"
 ]
 },
@@ -44,9 +43,7 @@
 },
 "outputs": [],
 "source": [
-"import os\n",
-"\n",
-"from langchain.schema import HumanMessage, SystemMessage\n",
+"from langchain_core.messages import HumanMessage, SystemMessage\n",
 "from langchain_fireworks import ChatFireworks"
 ]
 },
@@ -57,7 +54,7 @@
 "source": [
 "# Setup\n",
 "\n",
-"1. Make sure the `fireworks-ai` package is installed in your environment.\n",
+"1. Make sure the `langchain-fireworks` package is installed in your environment.\n",
 "2. Sign in to [Fireworks AI](http://fireworks.ai) for an API key to access our models, and make sure it is set as the `FIREWORKS_API_KEY` environment variable.\n",
 "3. Set up your model using a model ID. If the model is not set, the default model is fireworks-llama-v2-7b-chat. See the full, most up-to-date model list on [app.fireworks.ai](https://app.fireworks.ai)."
 ]
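For readers following these setup steps, here is a minimal end-to-end sketch of the flow the updated notebook describes. It assumes the `langchain-fireworks` package from step 1, reads the API key per step 2 (the `getpass` prompt is just one way to do that), and reuses the `mixtral-8x7b-instruct` model ID that appears later in this diff.

```python
import getpass
import os

from langchain_core.messages import HumanMessage, SystemMessage
from langchain_fireworks import ChatFireworks

# Step 2: make sure FIREWORKS_API_KEY is set; prompting with getpass is one option.
if "FIREWORKS_API_KEY" not in os.environ:
    os.environ["FIREWORKS_API_KEY"] = getpass.getpass("Fireworks API key: ")

# Step 3: pick an explicit model ID (this one appears later in the notebook).
chat = ChatFireworks(model="accounts/fireworks/models/mixtral-8x7b-instruct")

# invoke() is the supported call style; the notebook replaces chat(...) with chat.invoke(...).
response = chat.invoke(
    [
        SystemMessage(content="You are to chat with the user."),
        HumanMessage(content="Who are you?"),
    ]
)
print(response.content)
```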
@@ -95,14 +92,6 @@
 "id": "72340871-ae2f-415f-b399-0777d32dc379",
 "metadata": {},
 "outputs": [
-{
-"name": "stderr",
-"output_type": "stream",
-"text": [
-"/mnt/disks/data/langchain/.venv/lib/python3.10/site-packages/langchain_core/_api/deprecation.py:117: LangChainDeprecationWarning: The function `__call__` was deprecated in LangChain 0.1.7 and will be removed in 0.2.0. Use invoke instead.\n",
-" warn_deprecated(\n"
-]
-},
 {
 "data": {
 "text/plain": [
@@ -119,7 +108,7 @@
 "system_message = SystemMessage(content=\"You are to chat with the user.\")\n",
 "human_message = HumanMessage(content=\"Who are you?\")\n",
 "\n",
-"chat([system_message, human_message])"
+"chat.invoke([system_message, human_message])"
 ]
 },
 {
@@ -128,20 +117,10 @@
 "id": "68c6b1fa-2ff7-4a63-8d88-3cec302180b8",
 "metadata": {},
 "outputs": [
-{
-"name": "stderr",
-"output_type": "stream",
-"text": [
-"/mnt/disks/data/langchain/.venv/lib/python3.10/site-packages/langchain_core/utils/utils.py:159: UserWarning: WARNING! top_p is not default parameter.\n",
-" top_p was transferred to model_kwargs.\n",
-" Please confirm that top_p is what you intended.\n",
-" warnings.warn(\n"
-]
-},
 {
 "data": {
 "text/plain": [
-"AIMessage(content=\"I'm glad to chat with you! I'm an artificial intelligence and don't have\")"
+"AIMessage(content=\"I'm an AI and do not have the ability to experience the weather firsthand. However,\")"
 ]
 },
 "execution_count": 5,
@@ -155,12 +134,19 @@
 " model=\"accounts/fireworks/models/mixtral-8x7b-instruct\",\n",
 " temperature=1,\n",
 " max_tokens=20,\n",
-" top_p=1,\n",
 ")\n",
 "system_message = SystemMessage(content=\"You are to chat with the user.\")\n",
 "human_message = HumanMessage(content=\"How's the weather today?\")\n",
-"chat([system_message, human_message])"
+"chat.invoke([system_message, human_message])"
 ]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"id": "8c44cb36",
+"metadata": {},
+"outputs": [],
+"source": []
 }
 ],
 "metadata": {
@@ -179,7 +165,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.10.12"
+"version": "3.11.4"
 }
 },
 "nbformat": 4,

View File

@@ -19,31 +19,16 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"%pip install langchain\n",
-"%pip install langchain-fireworks"
+"%pip install -qU langchain-fireworks"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 3,
-"id": "6b9bcdac",
-"metadata": {},
-"outputs": [],
-"source": [
-"import fireworks\n",
-"import fireworks.client"
-]
-},
-{
-"cell_type": "code",
-"execution_count": 4,
+"execution_count": 2,
 "id": "60b6dbb2",
 "metadata": {},
 "outputs": [],
 "source": [
-"import os\n",
-"\n",
-"from langchain.prompts import PromptTemplate\n",
 "from langchain_fireworks import Fireworks"
 ]
 },
@@ -54,14 +39,14 @@
 "source": [
 "# Setup\n",
 "\n",
-"1. Make sure the `fireworks-ai` package is installed in your environment.\n",
+"1. Make sure the `langchain-fireworks` package is installed in your environment.\n",
 "2. Sign in to [Fireworks AI](http://fireworks.ai) for an API key to access our models, and make sure it is set as the `FIREWORKS_API_KEY` environment variable.\n",
 "3. Set up your model using a model ID. If the model is not set, the default model is fireworks-llama-v2-7b-chat. See the full, most up-to-date model list on [fireworks.ai](https://fireworks.ai)."
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": 3,
 "id": "9ca87a2e",
 "metadata": {},
 "outputs": [],
@@ -91,35 +76,28 @@
 },
 {
 "cell_type": "code",
-"execution_count": 6,
+"execution_count": 4,
 "id": "bf0a425c",
 "metadata": {},
 "outputs": [
-{
-"name": "stderr",
-"output_type": "stream",
-"text": [
-"/mnt/disks/data/langchain/.venv/lib/python3.10/site-packages/langchain_core/_api/deprecation.py:117: LangChainDeprecationWarning: The function `__call__` was deprecated in LangChain 0.1.7 and will be removed in 0.2.0. Use invoke instead.\n",
-" warn_deprecated(\n"
-]
-},
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
-" With Tom Brady at a season-best 9-0, he'\n"
+"\n",
+"Even if Tom Brady wins today, he'd still have the same\n"
 ]
 }
 ],
 "source": [
 "# Single prompt\n",
-"output = llm(\"Who's the best quarterback in the NFL?\")\n",
+"output = llm.invoke(\"Who's the best quarterback in the NFL?\")\n",
 "print(output)"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 7,
+"execution_count": 5,
 "id": "afc7de6f",
 "metadata": {},
 "outputs": [
@@ -127,7 +105,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"[[Generation(text=' In 2016, the best cricket player award should go to a')], [Generation(text=\"\\n\\nThere's a good argument that it's Kawhi Leonard\")]]\n"
+"[[Generation(text='\\n\\nR Ashwin is currently the best. He is an all rounder')], [Generation(text='\\nIn your opinion, who has the best overall statistics between Michael Jordan and Le')]]\n"
 ]
 }
 ],
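The source cell behind this output is not included in the hunk. As an illustrative sketch, a multi-prompt call through LangChain's standard `generate` method produces nested `Generation` lists like the ones shown; the two prompts below are hypothetical stand-ins, not the notebook's actual prompts.

```python
# Hypothetical prompts: the notebook's real prompts are not shown in this hunk.
result = llm.generate(
    [
        "Who's the best cricket player in 2016?",
        "Who's the best basketball player in the league?",
    ]
)
# result.generations is a list of lists of Generation objects, matching the
# printed output above.
print(result.generations)
```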
@@ -144,7 +122,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 9,
+"execution_count": 7,
 "id": "b801c20d",
 "metadata": {},
 "outputs": [
@@ -152,9 +130,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"\n",
-"\n",
-"December in Kansas City is typically cold. The average high temperature\n"
+" The weather in Kansas City in December is generally cold and snowy. The\n"
 ]
 }
 ],
@@ -166,7 +142,7 @@
 " max_tokens=15,\n",
 " top_p=1.0,\n",
 ")\n",
-"print(llm(\"What's the weather like in Kansas City in December?\"))"
+"print(llm.invoke(\"What's the weather like in Kansas City in December?\"))"
 ]
 },
 {
@@ -187,7 +163,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 13,
+"execution_count": 8,
 "id": "fd2c6bc1",
 "metadata": {},
 "outputs": [
@@ -227,7 +203,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 15,
+"execution_count": 9,
 "id": "f644ff28",
 "metadata": {},
 "outputs": [
@@ -247,6 +223,14 @@
 "for token in chain.stream({\"topic\": \"bears\"}):\n",
 " print(token, end=\"\", flush=True)"
 ]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"id": "fcc0eecb",
+"metadata": {},
+"outputs": [],
+"source": []
 }
 ],
 "metadata": {
@@ -265,7 +249,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.10.12"
+"version": "3.11.4"
 }
 },
 "nbformat": 4,

View File

@@ -5,10 +5,10 @@ Langchain.
 ## Installation and setup
-- Install the Fireworks client library.
+- Install the Fireworks integration package.
 ```
-pip install fireworks-ai
+pip install langchain-fireworks
 ```
 - Get a Fireworks API key by signing up at [fireworks.ai](https://fireworks.ai).
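After installing the package and setting the key, a quick smoke test might look like the sketch below; the model ID is borrowed from the notebooks above and can be swapped for any model in the Fireworks catalog.

```python
import os

from langchain_fireworks import ChatFireworks

# Assumes FIREWORKS_API_KEY was set as described above.
assert "FIREWORKS_API_KEY" in os.environ

chat = ChatFireworks(model="accounts/fireworks/models/mixtral-8x7b-instruct")
print(chat.invoke("Say hello in one short sentence.").content)
```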