{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# GPT4all\n", "\n", "This example goes over how to use LangChain to interact with GPT4All models" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "!pip install pyllamacpp" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from langchain.llms import GPT4All\n", "from langchain import PromptTemplate, LLMChain" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "template = \"\"\"Question: {question}\n", "\n", "Answer: Let's think step by step.\"\"\"\n", "\n", "prompt = PromptTemplate(template=template, input_variables=[\"question\"])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# You'll need to download a compatible model and convert it to ggml.\n", "# See: https://github.com/nomic-ai/gpt4all for more information.\n", "llm = GPT4All(model=\"./models/gpt4all-model.bin\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "llm_chain = LLMChain(prompt=prompt, llm=llm)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "question = \"What NFL team won the Super Bowl in the year Justin Bieber was born?\"\n", "\n", "llm_chain.run(question)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.1" } }, "nbformat": 4, "nbformat_minor": 2 }