diff --git a/templates/llama2-functions/README.md b/templates/llama2-functions/README.md index bbb86668708..f16989e969c 100644 --- a/templates/llama2-functions/README.md +++ b/templates/llama2-functions/README.md @@ -8,6 +8,8 @@ By default, it will extract the title and author of papers. ## LLM -This template will use `Replicate` [hosted version](https://replicate.com/andreasjansson/llama-2-13b-chat-gguf) of LLaMA. +This template will use a `Replicate` [hosted version](https://replicate.com/andreasjansson/llama-2-13b-chat-gguf) of LLaMA2 that has support for grammars and JSON schema. + +Based on the `Replicate` example, the grammar and JSON schema are supplied directly in the prompt. Be sure that `REPLICATE_API_TOKEN` is set in your environment. \ No newline at end of file diff --git a/templates/llama2-functions/llama2-functions.ipynb b/templates/llama2-functions/llama2-functions.ipynb index 1d6e89488e7..1a99cd01d05 100644 --- a/templates/llama2-functions/llama2-functions.ipynb +++ b/templates/llama2-functions/llama2-functions.ipynb @@ -1,6 +1,7 @@ { "cells": [ { + "attachments": {}, "cell_type": "markdown", "id": "9faf648c-541e-4368-82a8-96287dbf34de", "metadata": {}, @@ -23,6 +24,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "67306dbd-d79c-4723-825e-7d88edb811ba", "metadata": {}, @@ -43,7 +45,8 @@ "outputs": [], "source": [ "from langserve.client import RemoteRunnable\n", - "llama2_function = RemoteRunnable('http://0.0.0.0:8001/llama2_functions')" + "llama2_function = RemoteRunnable('http://0.0.0.0:8001/llama2_functions')\n", + "llama2_function.invoke({\"input\":text[0].page_content[0:1500]})" ] } ],