From 5e43768f6143805202bc3dc2c96a758da5d98381 Mon Sep 17 00:00:00 2001 From: fanyou-wbd <113745616+fanyou-wbd@users.noreply.github.com> Date: Tue, 15 Aug 2023 00:50:20 -0700 Subject: [PATCH] docs: update LlamaCpp max_tokens args (#9238) This PR updates documentation only: `max_length` should be `max_tokens` according to the latest LlamaCpp API doc: https://api.python.langchain.com/en/latest/llms/langchain.llms.llamacpp.LlamaCpp.html --- docs/extras/integrations/llms/llamacpp.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/integrations/llms/llamacpp.ipynb b/docs/extras/integrations/llms/llamacpp.ipynb index 59d6338960c..68c8680b050 100644 --- a/docs/extras/integrations/llms/llamacpp.ipynb +++ b/docs/extras/integrations/llms/llamacpp.ipynb @@ -242,7 +242,7 @@ "llm = LlamaCpp(\n", " model_path=\"/Users/rlm/Desktop/Code/llama/llama-2-7b-ggml/llama-2-7b-chat.ggmlv3.q4_0.bin\",\n", " temperature=0.75,\n", - " max_length=2000,\n", + " max_tokens=2000,\n", " top_p=1,\n", " callback_manager=callback_manager,\n", " verbose=True,\n",