From f9b4e501a88c537b130da05bb0b93fc562f2280d Mon Sep 17 00:00:00 2001 From: Deepesh Dhakal Date: Tue, 12 Aug 2025 05:25:35 +0900 Subject: [PATCH] fix(docs): update `llamacpp.ipynb` for installation options on Mac (#32341) The previous code generated a "data invalid" error. --------- Co-authored-by: Mason Daugherty Co-authored-by: Mason Daugherty Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- docs/docs/integrations/llms/llamacpp.ipynb | 26 ++++++---------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/docs/docs/integrations/llms/llamacpp.ipynb b/docs/docs/integrations/llms/llamacpp.ipynb index 965dac979b4..5fbfdf5ef74 100644 --- a/docs/docs/integrations/llms/llamacpp.ipynb +++ b/docs/docs/integrations/llms/llamacpp.ipynb @@ -44,9 +44,7 @@ "tags": [] }, "outputs": [], - "source": [ - "%pip install --upgrade --quiet llama-cpp-python" - ] + "source": "%pip install --upgrade --quiet llama-cpp-python" }, { "cell_type": "markdown", @@ -64,9 +62,7 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "!CMAKE_ARGS=\"-DGGML_CUDA=on\" FORCE_CMAKE=1 pip install llama-cpp-python" - ] + "source": "!CMAKE_ARGS=\"-DGGML_CUDA=on\" FORCE_CMAKE=1 pip install llama-cpp-python" }, { "cell_type": "markdown", @@ -80,9 +76,7 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "!CMAKE_ARGS=\"-DGGML_CUDA=on\" FORCE_CMAKE=1 pip install --upgrade --force-reinstall llama-cpp-python --no-cache-dir" - ] + "source": "!CMAKE_ARGS=\"-DGGML_CUDA=on\" FORCE_CMAKE=1 pip install --upgrade --force-reinstall llama-cpp-python --no-cache-dir" }, { "cell_type": "markdown", @@ -100,9 +94,7 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "!CMAKE_ARGS=\"-DLLAMA_METAL=on\" FORCE_CMAKE=1 pip install llama-cpp-python" - ] + "source": "!CMAKE_ARGS=\"-DLLAMA_METAL=on\" FORCE_CMAKE=1 pip install llama-cpp-python" }, { "cell_type": "markdown", @@ -116,9 +108,7 @@ "execution_count": null, "metadata": {}, 
"outputs": [], - "source": [ - "!CMAKE_ARGS=\"-DLLAMA_METAL=on\" FORCE_CMAKE=1 pip install --upgrade --force-reinstall llama-cpp-python --no-cache-dir" - ] + "source": "!CMAKE_ARGS=\"-DLLAMA_METAL=on\" FORCE_CMAKE=1 pip install llama-cpp-python --force-reinstall --no-binary :all: --no-cache-dir" }, { "cell_type": "markdown", @@ -174,9 +164,7 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "!python -m pip install -e . --force-reinstall --no-cache-dir" - ] + "source": "!python -m pip install -e . --force-reinstall --no-cache-dir" }, { "cell_type": "markdown", @@ -718,4 +706,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} +} \ No newline at end of file