From 88dc479c4aeefcbee41e4215ea046e57c742a023 Mon Sep 17 00:00:00 2001 From: ccurme Date: Fri, 7 Mar 2025 16:29:05 -0500 Subject: [PATCH] docs: update model used in ChatGroq (#30170) `mixtral-8x7b-32768` is being retired on March 20. --- docs/docs/integrations/chat/groq.ipynb | 41 ++++++++++---------------- 1 file changed, 15 insertions(+), 26 deletions(-) diff --git a/docs/docs/integrations/chat/groq.ipynb b/docs/docs/integrations/chat/groq.ipynb index 7f5bd28b3ed..a1696fba7a4 100644 --- a/docs/docs/integrations/chat/groq.ipynb +++ b/docs/docs/integrations/chat/groq.ipynb @@ -85,21 +85,10 @@ }, { "cell_type": "code", - "execution_count": 3, - "id": "652d6238-1f87-422a-b135-f5abbb8652fc", + "execution_count": null, + "id": "3f3f510e-2afe-4e76-be41-c5a9665aea63", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.1.2\u001b[0m\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", - "Note: you may need to restart the kernel to use updated packages.\n" - ] - } - ], + "outputs": [], "source": [ "%pip install -qU langchain-groq" ] @@ -116,7 +105,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 1, "id": "cb09c344-1836-4e0c-acf8-11d13ac1dbae", "metadata": {}, "outputs": [], @@ -124,7 +113,7 @@ "from langchain_groq import ChatGroq\n", "\n", "llm = ChatGroq(\n", - " model=\"mixtral-8x7b-32768\",\n", + " model=\"llama-3.1-8b-instant\",\n", " temperature=0,\n", " max_tokens=None,\n", " timeout=None,\n", @@ -143,7 +132,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 2, "id": "62e0dbc3", "metadata": { "tags": [] @@ -152,10 +141,10 @@ { "data": { "text/plain": [ - "AIMessage(content='I enjoy programming. 
(The French translation is: \"J\\'aime programmer.\")\\n\\nNote: I chose to translate \"I love programming\" as \"J\\'aime programmer\" instead of \"Je suis amoureux de programmer\" because the latter has a romantic connotation that is not present in the original English sentence.', response_metadata={'token_usage': {'completion_tokens': 73, 'prompt_tokens': 31, 'total_tokens': 104, 'completion_time': 0.1140625, 'prompt_time': 0.003352463, 'queue_time': None, 'total_time': 0.117414963}, 'model_name': 'mixtral-8x7b-32768', 'system_fingerprint': 'fp_c5f20b5bb1', 'finish_reason': 'stop', 'logprobs': None}, id='run-64433c19-eadf-42fc-801e-3071e3c40160-0', usage_metadata={'input_tokens': 31, 'output_tokens': 73, 'total_tokens': 104})" + "AIMessage(content='The translation of \"I love programming\" to French is:\\n\\n\"J\\'adore le programmation.\"', additional_kwargs={}, response_metadata={'token_usage': {'completion_tokens': 22, 'prompt_tokens': 55, 'total_tokens': 77, 'completion_time': 0.029333333, 'prompt_time': 0.003502892, 'queue_time': 0.553054073, 'total_time': 0.032836225}, 'model_name': 'llama-3.1-8b-instant', 'system_fingerprint': 'fp_a491995411', 'finish_reason': 'stop', 'logprobs': None}, id='run-2b2da04a-993c-40ab-becc-201eab8b1a1b-0', usage_metadata={'input_tokens': 55, 'output_tokens': 22, 'total_tokens': 77})" ] }, - "execution_count": 5, + "execution_count": 2, "metadata": {}, "output_type": "execute_result" } @@ -174,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 3, "id": "d86145b3-bfef-46e8-b227-4dda5c9c2705", "metadata": {}, "outputs": [ @@ -182,9 +171,9 @@ "name": "stdout", "output_type": "stream", "text": [ - "I enjoy programming. (The French translation is: \"J'aime programmer.\")\n", + "The translation of \"I love programming\" to French is:\n", "\n", - "Note: I chose to translate \"I love programming\" as \"J'aime programmer\" instead of \"Je suis amoureux de programmer\" because the latter has a romantic connotation that is not present in the original English sentence.\n" + "\"J'adore le programmation.\"\n" ] } ], @@ -204,17 +193,17 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 4, "id": "e197d1d7-a070-4c96-9f8a-a0e86d046e0b", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "AIMessage(content='That\\'s great! I can help you translate English phrases related to programming into German.\\n\\n\"I love programming\" can be translated as \"Ich liebe Programmieren\" in German.\\n\\nHere are some more programming-related phrases translated into German:\\n\\n* \"Programming language\" = \"Programmiersprache\"\\n* \"Code\" = \"Code\"\\n* \"Variable\" = \"Variable\"\\n* \"Function\" = \"Funktion\"\\n* \"Array\" = \"Array\"\\n* \"Object-oriented programming\" = \"Objektorientierte Programmierung\"\\n* \"Algorithm\" = \"Algorithmus\"\\n* \"Data structure\" = \"Datenstruktur\"\\n* \"Debugging\" = \"Fehlersuche\"\\n* \"Compile\" = \"Kompilieren\"\\n* \"Link\" = \"Verknüpfen\"\\n* \"Run\" = \"Ausführen\"\\n* \"Test\" = \"Testen\"\\n* \"Deploy\" = \"Bereitstellen\"\\n* \"Version control\" = \"Versionskontrolle\"\\n* \"Open source\" = \"Open Source\"\\n* \"Software development\" = \"Softwareentwicklung\"\\n* \"Agile methodology\" = \"Agile Methodik\"\\n* \"DevOps\" = \"DevOps\"\\n* \"Cloud computing\" = \"Cloud Computing\"\\n\\nI hope this helps! 
Let me know if you have any other questions or if you need further translations.', response_metadata={'token_usage': {'completion_tokens': 331, 'prompt_tokens': 25, 'total_tokens': 356, 'completion_time': 0.520006542, 'prompt_time': 0.00250165, 'queue_time': None, 'total_time': 0.522508192}, 'model_name': 'mixtral-8x7b-32768', 'system_fingerprint': 'fp_c5f20b5bb1', 'finish_reason': 'stop', 'logprobs': None}, id='run-74207fb7-85d3-417d-b2b9-621116b75d41-0', usage_metadata={'input_tokens': 25, 'output_tokens': 331, 'total_tokens': 356})" + "AIMessage(content='Ich liebe Programmieren.', additional_kwargs={}, response_metadata={'token_usage': {'completion_tokens': 6, 'prompt_tokens': 50, 'total_tokens': 56, 'completion_time': 0.008, 'prompt_time': 0.003337935, 'queue_time': 0.20949214500000002, 'total_time': 0.011337935}, 'model_name': 'llama-3.1-8b-instant', 'system_fingerprint': 'fp_a491995411', 'finish_reason': 'stop', 'logprobs': None}, id='run-e33b48dc-5e55-466e-9ebd-7b48c81c3cbd-0', usage_metadata={'input_tokens': 50, 'output_tokens': 6, 'total_tokens': 56})" ] }, - "execution_count": 7, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } @@ -269,7 +258,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.9" + "version": "3.10.4" } }, "nbformat": 4,
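For quick reference, the change above amounts to swapping the `model` argument in the notebook's ChatGroq constructor from the retiring `mixtral-8x7b-32768` to `llama-3.1-8b-instant`. Below is a minimal sketch of the updated usage assembled from the hunks, assuming `langchain-groq` is installed and `GROQ_API_KEY` is set in the environment; the constructor arguments not visible in the hunk and the example messages are illustrative assumptions, not copied from the notebook.

# Minimal sketch of the updated usage, assuming GROQ_API_KEY is exported and
# langchain-groq is installed (pip install -qU langchain-groq).
from langchain_groq import ChatGroq

llm = ChatGroq(
    model="llama-3.1-8b-instant",  # replaces the retiring mixtral-8x7b-32768
    temperature=0,
    max_tokens=None,
    timeout=None,
    # remaining constructor arguments are not shown in the hunk above
)

# Illustrative messages; the notebook's invocation cell is not part of this diff.
messages = [
    ("system", "You are a helpful assistant that translates English to French."),
    ("human", "I love programming."),
]
ai_msg = llm.invoke(messages)
print(ai_msg.content)

Running this should return an AIMessage whose `response_metadata` reports `llama-3.1-8b-instant` as the model name, matching the refreshed outputs in the patch.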