diff --git a/docs/extras/integrations/chat/openai.ipynb b/docs/extras/integrations/chat/openai.ipynb index c94cc92e499..d75fb6efaa5 100644 --- a/docs/extras/integrations/chat/openai.ipynb +++ b/docs/extras/integrations/chat/openai.ipynb @@ -143,12 +143,39 @@ ] }, { - "cell_type": "code", - "execution_count": null, - "id": "c095285d", + "cell_type": "markdown", + "id": "57e27714", "metadata": {}, - "outputs": [], - "source": [] + "source": [ + "## Fine-tuning\n", + "\n", + "You can call fine-tuned OpenAI models by passing in your corresponding `model_name` parameter.\n", + "\n", + "This generally takes the form of `ft:{OPENAI_MODEL_NAME}:{ORG_NAME}::{MODEL_ID}`. For example:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "33c4a8b0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "AIMessage(content=\"J'adore la programmation.\", additional_kwargs={}, example=False)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "fine_tuned_model = ChatOpenAI(temperature=0, model_name=\"ft:gpt-3.5-turbo-0613:langchain::7qTVM5AR\")\n", + "\n", + "fine_tuned_model(messages)" + ] } ], "metadata": { @@ -167,7 +194,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.10.5" } }, "nbformat": 4,