mirror of
https://github.com/hwchase17/langchain.git
synced 2025-09-05 04:55:14 +00:00
Templates (#12294)
Co-authored-by: Harrison Chase <hw.chase.17@gmail.com> Co-authored-by: Lance Martin <lance@langchain.dev> Co-authored-by: Jacob Lee <jacoblee93@gmail.com>
This commit is contained in:
21
templates/openai-functions-agent/LICENSE
Normal file
21
templates/openai-functions-agent/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License

Copyright (c) 2023 LangChain, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
|
16
templates/openai-functions-agent/README.md
Normal file
16
templates/openai-functions-agent/README.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# OpenAI Functions Agent

This template creates an agent that uses OpenAI function calling to
communicate its decisions about which actions to take. The agent can
optionally look things up on the internet using Tavily's search engine.

## LLM

This template uses `OpenAI` by default.

Be sure that `OPENAI_API_KEY` is set in your environment.

## Tools

This template uses `Tavily` by default.

Be sure that `TAVILY_API_KEY` is set in your environment.
|
5
templates/openai-functions-agent/main.py
Normal file
5
templates/openai-functions-agent/main.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from openai_functions_agent.agent import agent_executor

if __name__ == "__main__":
    # Smoke-test the agent: ask one sample question with no prior history.
    demo_question = "who won the womens world cup in 2023?"
    result = agent_executor.invoke({"input": demo_question, "chat_history": []})
    print(result)
|
@@ -0,0 +1,3 @@
|
||||
"""Package entry point: expose the configured agent executor."""

from openai_functions_agent.agent import agent_executor

__all__ = ["agent_executor"]
|
@@ -0,0 +1,54 @@
|
||||
from typing import List, Tuple
|
||||
from langchain.schema.messages import HumanMessage, AIMessage
|
||||
from langchain.chat_models import ChatOpenAI
|
||||
from langchain.agents import AgentExecutor
|
||||
from langchain.utilities.tavily_search import TavilySearchAPIWrapper
|
||||
from langchain.tools.tavily_search import TavilySearchResults
|
||||
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
|
||||
from langchain.tools.render import format_tool_to_openai_function
|
||||
from langchain.agents.format_scratchpad import format_to_openai_functions
|
||||
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
|
||||
from langchain.pydantic_v1 import BaseModel
|
||||
|
||||
|
||||
# --- Tool setup -------------------------------------------------------------
# One search tool backed by Tavily (reads TAVILY_API_KEY from the env).
search = TavilySearchAPIWrapper()
tavily_tool = TavilySearchResults(api_wrapper=search)

tools = [tavily_tool]

# --- Model and prompt -------------------------------------------------------
# temperature=0 so the agent's action choices are as deterministic as possible.
llm = ChatOpenAI(temperature=0)

# NOTE(review): the system message talks about word lengths, which looks like
# a leftover from a different example — confirm whether it should describe the
# search capability instead.
prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            "You are very powerful assistant, but bad at calculating lengths of words.",
        ),
        MessagesPlaceholder(variable_name="chat_history"),
        ("user", "{input}"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ]
)

# Advertise each tool to the model as an OpenAI function definition.
llm_with_tools = llm.bind(
    functions=[format_tool_to_openai_function(t) for t in tools]
)
|
||||
|
||||
def _format_chat_history(chat_history: List[Tuple[str, str]]):
    """Expand (human, ai) string pairs into an alternating message list.

    Each tuple yields a HumanMessage followed by an AIMessage, preserving
    the original turn order.
    """
    return [
        message
        for human_turn, ai_turn in chat_history
        for message in (
            HumanMessage(content=human_turn),
            AIMessage(content=ai_turn),
        )
    ]
|
||||
|
||||
|
||||
# Runnable pipeline: project the raw input dict into prompt variables, render
# the prompt, call the function-bound model, and parse its function-call reply.
agent = (
    {
        "input": lambda x: x["input"],
        "chat_history": lambda x: _format_chat_history(x["chat_history"]),
        "agent_scratchpad": lambda x: format_to_openai_functions(
            x["intermediate_steps"]
        ),
    }
    | prompt
    | llm_with_tools
    | OpenAIFunctionsAgentOutputParser()
)
||||
|
||||
class AgentInput(BaseModel):
    """Typed schema for the executor's input payload."""

    # The user's current question.
    input: str
    # Prior conversation turns as (human, ai) string pairs.
    chat_history: List[Tuple[str, str]]
|
||||
|
||||
# Wrap the agent in an executor (which runs the tool-calling loop), declare
# the input schema for serving, then project the result down to the answer.
_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
agent_executor = _executor.with_types(input_type=AgentInput)
agent_executor = agent_executor | (lambda x: x["output"])
|
1254
templates/openai-functions-agent/poetry.lock
generated
Normal file
1254
templates/openai-functions-agent/poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
20
templates/openai-functions-agent/pyproject.toml
Normal file
20
templates/openai-functions-agent/pyproject.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
[tool.poetry]
name = "openai-functions-agent"
version = "0.1.0"
description = ""
authors = ["Lance Martin <lance@langchain.dev>"]
readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
langchain = ">=0.0.322"
openai = ">=0.5.0"
tavily-python = "^0.1.9"

[tool.langserve]
export_module = "openai_functions_agent"
export_attr = "agent_executor"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
|
0
templates/openai-functions-agent/tests/__init__.py
Normal file
0
templates/openai-functions-agent/tests/__init__.py
Normal file
Reference in New Issue
Block a user