{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "bb0735c0",
   "metadata": {},
   "source": [
    "# How to use few-shot examples\n",
    "\n",
    "This notebook covers how to use few-shot examples in chat models.\n",
    "\n",
    "There does not appear to be solid consensus on how best to do few-shot prompting. As a result, we are not solidifying any abstractions around this yet, but rather using existing abstractions."
   ]
  },
  {
   "cell_type": "markdown",
   "id": "c6e9664c",
   "metadata": {},
   "source": [
    "## Alternating Human/AI messages\n",
    "The first way of doing few-shot prompting relies on alternating human/AI messages. See an example of this below."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "62156fe4",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.chat_models import ChatOpenAI\n",
    "from langchain import PromptTemplate, LLMChain\n",
    "from langchain.prompts.chat import (\n",
    "    ChatPromptTemplate,\n",
    "    SystemMessagePromptTemplate,\n",
    "    AIMessagePromptTemplate,\n",
    "    HumanMessagePromptTemplate,\n",
    ")\n",
    "from langchain.schema import (\n",
    "    AIMessage,\n",
    "    HumanMessage,\n",
    "    SystemMessage\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "ed7ac3c6",
   "metadata": {},
   "outputs": [],
   "source": [
    "chat = ChatOpenAI(temperature=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "98791aa9",
   "metadata": {},
   "outputs": [],
   "source": [
    "template=\"You are a helpful assistant that translates English to pirate.\"\n",
    "system_message_prompt = SystemMessagePromptTemplate.from_template(template)\n",
    "example_human = HumanMessagePromptTemplate.from_template(\"Hi\")\n",
    "example_ai = AIMessagePromptTemplate.from_template(\"Argh me mateys\")\n",
    "human_template=\"{text}\"\n",
    "human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "4eebdcd7",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "\"I be lovin' programmin', me hearty!\""
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "chat_prompt = ChatPromptTemplate.from_messages([system_message_prompt, example_human, example_ai, human_message_prompt])\n",
    "chain = LLMChain(llm=chat, prompt=chat_prompt)\n",
    "# get a chat completion from the formatted messages\n",
    "chain.run(\"I love programming.\")"
   ]
  },
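  {
   "cell_type": "markdown",
   "id": "3f2a9b10",
   "metadata": {},
   "source": [
    "To make the alternating pattern concrete, you can format the prompt without calling the model and look at the message list it produces. This is a minimal sketch, assuming the standard `format_prompt(...).to_messages()` interface on the prompt template; its output is not captured here."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8d4c1e22",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: inspect the messages the few-shot prompt produces before they are\n",
    "# sent to the model (assumes format_prompt / to_messages from the prompt API).\n",
    "# Expected order: SystemMessage, HumanMessage (\"Hi\"), AIMessage (\"Argh me mateys\"),\n",
    "# then the real HumanMessage built from the {text} variable.\n",
    "for message in chat_prompt.format_prompt(text=\"I love programming.\").to_messages():\n",
    "    print(type(message).__name__, \"->\", message.content)"
   ]
  },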
  {
   "cell_type": "markdown",
   "id": "5c4135d7",
   "metadata": {},
   "source": [
    "## System Messages\n",
    "\n",
    "OpenAI provides an optional `name` parameter, which they also recommend using in conjunction with system messages for few-shot prompting. Here is an example of how to do that below."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "1ba92d59",
   "metadata": {},
   "outputs": [],
   "source": [
    "template=\"You are a helpful assistant that translates English to pirate.\"\n",
    "system_message_prompt = SystemMessagePromptTemplate.from_template(template)\n",
    "example_human = SystemMessagePromptTemplate.from_template(\"Hi\", additional_kwargs={\"name\": \"example_user\"})\n",
    "example_ai = SystemMessagePromptTemplate.from_template(\"Argh me mateys\", additional_kwargs={\"name\": \"example_assistant\"})\n",
    "human_template=\"{text}\"\n",
    "human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "56e488a7",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "\"I be lovin' programmin', me hearty.\""
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "chat_prompt = ChatPromptTemplate.from_messages([system_message_prompt, example_human, example_ai, human_message_prompt])\n",
    "chain = LLMChain(llm=chat, prompt=chat_prompt)\n",
    "# get a chat completion from the formatted messages\n",
    "chain.run(\"I love programming.\")"
   ]
  },
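  {
   "cell_type": "markdown",
   "id": "a7c55b31",
   "metadata": {},
   "source": [
    "The `name` field rides along as message metadata rather than in the text itself. As a quick check, this sketch (again assuming `format_prompt(...).to_messages()`) prints each message's `additional_kwargs`, so you can see `example_user` and `example_assistant` attached to the two example system messages."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c9e0d412",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: the example turns are SystemMessages whose additional_kwargs carry\n",
    "# the OpenAI `name` field ({'name': 'example_user'} / {'name': 'example_assistant'}).\n",
    "for message in chat_prompt.format_prompt(text=\"I love programming.\").to_messages():\n",
    "    print(type(message).__name__, message.additional_kwargs, \"->\", message.content)"
   ]
  }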
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}