From 19c2797bed50cb8d4eedf65d8ff1c4ba24ebdde0 Mon Sep 17 00:00:00 2001
From: Harrison Chase
Date: Wed, 15 Feb 2023 23:04:28 -0800
Subject: [PATCH] add anthropic example (#1041)

Co-authored-by: Ivan Vendrov
Co-authored-by: Sasmitha Manathunga <70096033+mmz-001@users.noreply.github.com>
---
 docs/modules/llms/integrations.rst            |   2 +
 .../llms/integrations/anthropic_example.ipynb | 110 ++++++++++++++++++
 .../llms/integrations/petals_example.ipynb    |  13 ++-
 langchain/llms/anthropic.py                   |  14 ++-
 4 files changed, 133 insertions(+), 6 deletions(-)
 create mode 100644 docs/modules/llms/integrations/anthropic_example.ipynb

diff --git a/docs/modules/llms/integrations.rst b/docs/modules/llms/integrations.rst
index 59592fdac0d..e57af86962b 100644
--- a/docs/modules/llms/integrations.rst
+++ b/docs/modules/llms/integrations.rst
@@ -19,6 +19,8 @@ The examples here are all "how-to" guides for how to integrate with various LLM
 
 `PromptLayer OpenAI <./integrations/promptlayer_openai.html>`_: Covers how to use `PromptLayer <https://promptlayer.com/>`_ with Langchain.
 
+`Anthropic <./integrations/anthropic_example.html>`_: Covers how to use Anthropic models with Langchain.
+
 .. toctree::
    :maxdepth: 1
 
diff --git a/docs/modules/llms/integrations/anthropic_example.ipynb b/docs/modules/llms/integrations/anthropic_example.ipynb
new file mode 100644
index 00000000000..07d7b79a736
--- /dev/null
+++ b/docs/modules/llms/integrations/anthropic_example.ipynb
@@ -0,0 +1,110 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "9597802c",
+   "metadata": {},
+   "source": [
+    "# Anthropic\n",
+    "This example goes over how to use LangChain to interact with Anthropic models."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "6fb585dd",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain.llms import Anthropic\n",
+    "from langchain import PromptTemplate, LLMChain"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "035dea0f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "template = \"\"\"Question: {question}\n",
+    "\n",
+    "Answer: Let's think step by step.\"\"\"\n",
+    "\n",
+    "prompt = PromptTemplate(template=template, input_variables=[\"question\"])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "3f3458d9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "llm = Anthropic()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "a641dbd9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "llm_chain = LLMChain(prompt=prompt, llm=llm)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "9f844993",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "\" Step 1: Justin Beiber was born on March 1, 1994\\nStep 2: The NFL season ends with the Super Bowl in January/February\\nStep 3: Therefore, the Super Bowl that occurred closest to Justin Beiber's birth would be Super Bowl XXIX in 1995\\nStep 4: The San Francisco 49ers won Super Bowl XXIX in 1995\\n\\nTherefore, the answer is the San Francisco 49ers won the Super Bowl in the year Justin Beiber was born.\""
+      ]
+     },
+     "execution_count": 5,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "question = \"What NFL team won the Super Bowl in the year Justin Beiber was born?\"\n",
+    "\n",
+    "llm_chain.run(question)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4797d719",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.1"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/modules/llms/integrations/petals_example.ipynb b/docs/modules/llms/integrations/petals_example.ipynb
index 68acf274648..57289585d60 100644
--- a/docs/modules/llms/integrations/petals_example.ipynb
+++ b/docs/modules/llms/integrations/petals_example.ipynb
@@ -136,15 +136,22 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3.9.12 ('palm')",
+   "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
   },
   "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
    "name": "python",
-   "version": "3.9.12"
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.1"
   },
-  "orig_nbformat": 4,
   "vscode": {
    "interpreter": {
     "hash": "a0a0263b650d907a3bfe41c0f8d6a63a071b884df3cfdc1579f00cdc1aed6b03"
diff --git a/langchain/llms/anthropic.py b/langchain/llms/anthropic.py
index 91fc3202b6a..f2e1df4d5d1 100644
--- a/langchain/llms/anthropic.py
+++ b/langchain/llms/anthropic.py
@@ -1,4 +1,5 @@
 """Wrapper around Anthropic APIs."""
+import re
 from typing import Any, Dict, Generator, List, Mapping, Optional
 
 from pydantic import BaseModel, Extra, root_validator
@@ -32,7 +33,7 @@ class Anthropic(LLM, BaseModel):
     """
 
     client: Any  #: :meta private:
-    model: Optional[str] = None
+    model: str = "claude-v1"
     """Model name to use."""
 
     max_tokens_to_sample: int = 256
@@ -99,10 +100,17 @@ class Anthropic(LLM, BaseModel):
     def _wrap_prompt(self, prompt: str) -> str:
         if not self.HUMAN_PROMPT or not self.AI_PROMPT:
             raise NameError("Please ensure the anthropic package is loaded")
+
         if prompt.startswith(self.HUMAN_PROMPT):
             return prompt  # Already wrapped.
-        else:
-            return f"{self.HUMAN_PROMPT} {prompt}{self.AI_PROMPT} Sure, here you go:\n"
+
+        # Guard against common errors in specifying wrong number of newlines.
+        corrected_prompt, n_subs = re.subn(r"^\n*Human:", self.HUMAN_PROMPT, prompt)
+        if n_subs == 1:
+            return corrected_prompt
+
+        # As a last resort, wrap the prompt ourselves to emulate instruct-style.
+        return f"{self.HUMAN_PROMPT} {prompt}{self.AI_PROMPT} Sure, here you go:\n"
 
     def _get_anthropic_stop(self, stop: Optional[List[str]] = None) -> List[str]:
         if not self.HUMAN_PROMPT or not self.AI_PROMPT: