fireworks[minor]: remove default model and temperature (#30965)

`mixtral-8x7b-instruct` was recently retired from Fireworks Serverless.

Here we remove the default model altogether, so that the model must be
explicitly specified on init:
```python
ChatFireworks(model="accounts/fireworks/models/llama-v3p1-70b-instruct")  # for example
```

We also set a null default for `temperature`, which previously defaulted
to 0.0. This parameter will no longer be included in request payloads
unless it is explicitly provided.
This commit is contained in:
ccurme
2025-04-22 15:58:58 -04:00
committed by GitHub
parent 4be55f7c89
commit eedda164c6
6 changed files with 27 additions and 89 deletions

View File

@@ -90,7 +90,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"id": "d285fd7f",
"metadata": {},
"outputs": [],
@@ -99,7 +99,7 @@
"\n",
"# Initialize a Fireworks model\n",
"llm = Fireworks(\n",
" model=\"accounts/fireworks/models/mixtral-8x7b-instruct\",\n",
" model=\"accounts/fireworks/models/llama-v3p1-8b-instruct\",\n",
" base_url=\"https://api.fireworks.ai/inference/v1/completions\",\n",
")"
]
@@ -176,7 +176,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": null,
"id": "b801c20d",
"metadata": {},
"outputs": [
@@ -192,7 +192,7 @@
"source": [
"# Setting additional parameters: temperature, max_tokens, top_p\n",
"llm = Fireworks(\n",
" model=\"accounts/fireworks/models/mixtral-8x7b-instruct\",\n",
" model=\"accounts/fireworks/models/llama-v3p1-8b-instruct\",\n",
" temperature=0.7,\n",
" max_tokens=15,\n",
" top_p=1.0,\n",
@@ -218,7 +218,7 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": null,
"id": "fd2c6bc1",
"metadata": {},
"outputs": [
@@ -235,7 +235,7 @@
"from langchain_fireworks import Fireworks\n",
"\n",
"llm = Fireworks(\n",
" model=\"accounts/fireworks/models/mixtral-8x7b-instruct\",\n",
" model=\"accounts/fireworks/models/llama-v3p1-8b-instruct\",\n",
" temperature=0.7,\n",
" max_tokens=15,\n",
" top_p=1.0,\n",