Mirror of https://github.com/hwchase17/langchain.git, synced 2026-02-03 15:55:44 +00:00
@@ -3,18 +3,18 @@ from __future__ import annotations
 import json
 from typing import List, Sequence
 
+from langchain.automaton.chat_agent import ChatAgent
 from langchain.automaton.typedefs import (
     AgentFinish,
     FunctionCall,
-    MessageLike,
     FunctionResult,
+    MessageLike,
 )
-from langchain.output_parsers.openai_functions import JsonOutputFunctionsParser
-from langchain.schema import Generation, AIMessage, BaseMessage, FunctionMessage
-from langchain.schema.output_parser import BaseGenerationOutputParser
-from langchain.automaton.chat_agent import ChatAgent
 from langchain.chat_models.openai import ChatOpenAI
-from langchain.tools import format_tool_to_openai_function, BaseTool
+from langchain.output_parsers.openai_functions import JsonOutputFunctionsParser
+from langchain.schema import AIMessage, BaseMessage, FunctionMessage, Generation
+from langchain.schema.output_parser import BaseGenerationOutputParser
+from langchain.tools import BaseTool, format_tool_to_openai_function
 
 
 class OpenAIFunctionsParser(BaseGenerationOutputParser):

@@ -6,7 +6,7 @@ from __future__ import annotations
 
 import ast
 import re
-from typing import Sequence, Optional, Union, List
+from typing import List, Optional, Sequence, Union
 
 from langchain.automaton.runnables import (
     create_llm_program,
@@ -14,19 +14,19 @@ from langchain.automaton.runnables import (
 from langchain.automaton.tool_utils import generate_tool_info
 from langchain.automaton.typedefs import (
     Agent,
-    MessageLog,
-    MessageLike,
+    AgentFinish,
     FunctionCall,
     FunctionResult,
-    AgentFinish,
+    MessageLike,
+    MessageLog,
     PrimingMessage,
 )
 from langchain.prompts import SystemMessagePromptTemplate
 from langchain.schema import (
-    PromptValue,
+    AIMessage,
     BaseMessage,
     HumanMessage,
-    AIMessage,
+    PromptValue,
     SystemMessage,
 )
 from langchain.schema.language_model import BaseLanguageModel

@@ -2,15 +2,15 @@ from __future__ import annotations
 
 import ast
 import re
-from typing import Sequence, List, Union
+from typing import List, Sequence, Union
 
 from langchain.automaton.chat_agent import ChatAgent
 from langchain.automaton.tool_utils import generate_tool_info
 from langchain.automaton.typedefs import (
-    MessageLike,
     AgentFinish,
     FunctionCall,
     FunctionResult,
+    MessageLike,
 )
 from langchain.prompts import SystemMessagePromptTemplate
 from langchain.schema import BaseMessage, HumanMessage

@@ -1,19 +1,19 @@
 """Generalized chat agent, works with any chat model."""
 from __future__ import annotations
 
-from typing import TypeVar, Callable, Optional, Sequence, Union, Iterator
+from typing import Callable, Iterator, Optional, Sequence, TypeVar, Union
 
 from langchain.automaton.runnables import create_llm_program
 from langchain.automaton.typedefs import (
+    Agent,
     AgentFinish,
     MessageLike,
-    Agent,
 )
 from langchain.schema import PromptValue
 from langchain.schema.language_model import (
     BaseLanguageModel,
-    LanguageModelOutput,
     LanguageModelInput,
+    LanguageModelOutput,
 )
 from langchain.schema.messages import BaseMessage
 from langchain.schema.output_parser import BaseOutputParser
@@ -25,7 +25,6 @@ from langchain.tools import BaseTool
 T = TypeVar("T")
 
 
-
 class ChatAgent(Agent):
     """A generalized chat agent."""
 

@@ -57,7 +57,7 @@
     "\n",
     "    @tool\n",
     "    def add(x: int, y: int) -> int:\n",
-    "        \"\"\"Use to add two numbers. For example; { \"x\": 2, \"y\": 10} \"\"\"\n",
+    "        \"\"\"Use to add two numbers. For example; { \"x\": 2, \"y\": 10}\"\"\"\n",
     "        return x + y\n",
     "\n",
     "    @tool\n",
@@ -72,6 +72,7 @@
     "\n",
     "    return list(locals().values())\n",
     "\n",
+    "\n",
     "tools = get_tools()"
    ]
   },
@@ -136,13 +137,18 @@
     "\n",
     "You can only take a single action at a time.\"\"\"\n",
     "\n",
-    "messages = ChatPromptTemplate.from_messages([\n",
-    "    (\"system\", sys_msg),\n",
-    "    ('human', 'what is 5+8'),\n",
-    "    ('ai', '<action> {{ \"action\": \"add\", \"action_input\": {{ \"x\": 5, \"y\": 8 }} }} </action>'),\n",
-    "    ('human', 'Observation: 13'),\n",
-    "    ('ai', '5 + 8 is 13'),\n",
-    "]).format_messages(**tool_info)"
+    "messages = ChatPromptTemplate.from_messages(\n",
+    "    [\n",
+    "        (\"system\", sys_msg),\n",
+    "        (\"human\", \"what is 5+8\"),\n",
+    "        (\n",
+    "            \"ai\",\n",
+    "            '<action> {{ \"action\": \"add\", \"action_input\": {{ \"x\": 5, \"y\": 8 }} }} </action>',\n",
+    "        ),\n",
+    "        (\"human\", \"Observation: 13\"),\n",
+    "        (\"ai\", \"5 + 8 is 13\"),\n",
+    "    ]\n",
+    ").format_messages(**tool_info)"
    ]
   },
   {
@@ -282,7 +288,9 @@
    ],
    "source": [
     "message_log = VerboseMessageLog(messages=messages)\n",
-    "question = HumanMessage(content=\"Sum of first 10 numbers starting from 1? use one tool at a time\")\n",
+    "question = HumanMessage(\n",
+    "    content=\"Sum of first 10 numbers starting from 1? use one tool at a time\"\n",
+    ")\n",
     "message_log.add_messages([question])\n",
     "\n",
     "agent.run(message_log)"
@@ -636,25 +644,24 @@
     }
    ],
    "source": [
-    "\n",
     "# Generate HTML divs\n",
     "html_divs = []\n",
     "for idx, msg in enumerate(messages):\n",
-    "    background_color = \"DodgerBlue\" if idx % 2 == 0 else 'Tomato'\n",
-    "    div_content = f'''\n",
+    "    background_color = \"DodgerBlue\" if idx % 2 == 0 else \"Tomato\"\n",
+    "    div_content = f\"\"\"\n",
     "    <div style=\"border: 1px solid black; padding: 10px; background-color:{background_color}\">\n",
     "        <div style=\"display: flex;\">\n",
     "            <div style=\"font-weight: bold; margin-right: 10px;\">{html.escape(msg.type)}</div>\n",
     "            <div>{html.escape(msg.content)}</div>\n",
     "        </div>\n",
     "    </div>\n",
-    "'''\n",
+    "\"\"\"\n",
     "    html_divs.append(div_content)\n",
     "\n",
     "# Display HTML divs in a Jupyter Notebook cell\n",
     "from IPython.display import HTML, display\n",
     "\n",
-    "html_output = '\\n'.join(html_divs)\n",
+    "html_output = \"\\n\".join(html_divs)\n",
     "display(HTML(html_output))"
    ]
   }

@@ -36,7 +36,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "sys.path.insert(0, '/home/eugene/src/langchain/libs/langchain/')"
+    "sys.path.insert(0, \"/home/eugene/src/langchain/libs/langchain/\")"
    ]
   },
   {
@@ -48,6 +48,7 @@
    "source": [
     "from typing import List, Optional, Any\n",
     "from langchain.tools import tool, Tool, format_tool_to_openai_function\n",
+    "from langchain.automaton.agent_implementations.openai_agent import create_openai_agent\n",
     "from langchain.schema.messages import AIMessage, HumanMessage\n",
     "from langchain.chat_models.openai import ChatOpenAI\n",
     "from langchain.prompts import ChatPromptTemplate\n",
@@ -78,7 +79,7 @@
     "\n",
     "    @tool\n",
     "    def add(x: int, y: int) -> int:\n",
-    "        \"\"\"Use to add two numbers. For example; { \"x\": 2, \"y\": 10} \"\"\"\n",
+    "        \"\"\"Use to add two numbers. For example; { \"x\": 2, \"y\": 10}\"\"\"\n",
     "        return x + y\n",
     "\n",
     "    @tool\n",
@@ -93,6 +94,7 @@
     "\n",
     "    return list(locals().values())\n",
     "\n",
+    "\n",
     "tools = get_tools()"
    ]
   },
@@ -124,7 +126,13 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "a = agent.invoke([HumanMessage(content=\"Sum of first 10 numbers starting from 1? use one tool at a time\")])"
+    "a = agent.invoke(\n",
+    "    [\n",
+    "        HumanMessage(\n",
+    "            content=\"Sum of first 10 numbers starting from 1? use one tool at a time\"\n",
+    "        )\n",
+    "    ]\n",
+    ")"
    ]
   },
   {

@@ -27,7 +27,11 @@
    "outputs": [],
    "source": [
     "from typing import List, Optional, Any\n",
-    "from langchain.automaton.think_act_agent import ThinkActAgent, generate_memory, ThinkActPromptGenerator\n",
+    "from langchain.automaton.think_act_agent import (\n",
+    "    ThinkActAgent,\n",
+    "    generate_memory,\n",
+    "    ThinkActPromptGenerator,\n",
+    ")\n",
     "from langchain.schema.messages import HumanMessage\n",
     "from langchain.llms import OpenAI\n",
     "from langchain.tools import tool, Tool"
@@ -53,7 +57,7 @@
     "\n",
     "    @tool\n",
     "    def add(x: int, y: int) -> int:\n",
-    "        \"\"\"Use to add two numbers. For example; { \"x\": 2, \"y\": 10} \"\"\"\n",
+    "        \"\"\"Use to add two numbers. For example; { \"x\": 2, \"y\": 10}\"\"\"\n",
     "        return x + y\n",
     "\n",
     "    @tool\n",
@@ -63,6 +67,7 @@
     "\n",
     "    return list(locals().values())\n",
     "\n",
+    "\n",
     "tools = get_tools()"
    ]
   },
@@ -85,7 +90,7 @@
    "outputs": [],
    "source": [
     "message_log = generate_memory(tools)\n",
-    "question = HumanMessage(content='what is the 2 + 5?')\n",
+    "question = HumanMessage(content=\"what is the 2 + 5?\")\n",
     "message_log.add_messages([question])\n",
     "agent.run(message_log)"
    ]

@@ -1,23 +1,23 @@
 """Module contains useful runnables for agents."""
 from __future__ import annotations
 
-from typing import Callable, TypeVar, Dict, Union, Sequence, List, Optional, Any
+from typing import Any, Callable, Dict, List, Optional, Sequence, TypeVar, Union
 
 from langchain.automaton.typedefs import (
-    MessageLike,
-    FunctionResult,
     FunctionCall,
+    FunctionResult,
+    MessageLike,
 )
 from langchain.callbacks.manager import CallbackManagerForChainRun
-from langchain.schema import BaseMessage, AIMessage, PromptValue
+from langchain.schema import AIMessage, BaseMessage, PromptValue
 from langchain.schema.language_model import BaseLanguageModel
 from langchain.schema.output_parser import BaseOutputParser
 from langchain.schema.runnable import (
-    RunnableLambda,
     Runnable,
-    RunnablePassthrough,
-    RunnableMap,
     RunnableConfig,
+    RunnableLambda,
+    RunnableMap,
+    RunnablePassthrough,
     patch_config,
 )
 from langchain.tools import BaseTool

@@ -10,16 +10,16 @@ from langchain.automaton.tests.utils import (
     construct_func_invocation_message,
 )
 from langchain.automaton.typedefs import (
+    AgentFinish,
     FunctionCall,
     FunctionResult,
     MessageLog,
-    AgentFinish,
 )
 from langchain.schema.messages import (
     AIMessage,
     SystemMessage,
 )
-from langchain.tools import tool, Tool
+from langchain.tools import Tool, tool
 from langchain.tools.base import BaseTool
 
 

@@ -4,8 +4,8 @@ from typing import Any, List, Optional, cast
 import pytest
 
 from langchain.automaton.runnables import (
-    _apply_and_concat,
     RunnablePassthrough,
+    _apply_and_concat,
     _to_list,
     _to_runnable_parser,
     create_llm_program,
@@ -14,9 +14,9 @@ from langchain.automaton.tests.utils import (
     FakeChatModel,
 )
 from langchain.automaton.typedefs import FunctionCall, FunctionResult, MessageLike
-from langchain.schema.messages import HumanMessage, AIMessage, BaseMessage
-from langchain.schema.runnable import RunnableLambda
 from langchain.schema.language_model import BaseLanguageModel
+from langchain.schema.messages import AIMessage, BaseMessage, HumanMessage
+from langchain.schema.runnable import RunnableLambda
 from langchain.tools import BaseTool, tool
 
 

@@ -1,11 +1,11 @@
 from __future__ import annotations
 
 import json
-from typing import Iterator, List, Any, Mapping
+from typing import Any, Iterator, List, Mapping
 
 from langchain.callbacks.manager import CallbackManagerForLLMRun
 from langchain.chat_models.base import BaseChatModel
-from langchain.schema import BaseMessage, ChatResult, ChatGeneration, AIMessage
+from langchain.schema import AIMessage, BaseMessage, ChatGeneration, ChatResult
 from langchain.tools import BaseTool
 
 

@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import Any, Optional, Sequence, Mapping, overload, Union, Iterator
+from typing import Any, Iterator, Mapping, Optional, Sequence, Union, overload
 
 from langchain.load.serializable import Serializable
 from langchain.schema import (