Mirror of https://github.com/hwchase17/langchain.git, synced 2026-01-29 21:30:18 +00:00
support adding custom metadata to runs (#7120)
- [x] wire up tools
- [x] wire up retrievers
- [x] add integration test
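In short, the change lets callers attach arbitrary key/value metadata to traced runs, either on the model at construction time or per call alongside `tags`. Below is a minimal sketch of that calling pattern, modeled on the tests in the diff that follows; the prompt text and the `LLMChain` wiring are illustrative only, and it assumes tracing credentials are already configured in the environment:

```python
import os

from langchain import LLMChain, OpenAI, PromptTemplate

# Send runs to the v2 tracing backend (assumes API credentials are set elsewhere).
os.environ["LANGCHAIN_TRACING_V2"] = "true"

# Metadata attached at construction time is applied to every run of this model.
llm = OpenAI(temperature=0, metadata={"f": "g", "h": "i"})

# Illustrative chain; any Chain.run / Chain.arun call site works the same way.
prompt = PromptTemplate(
    input_variables=["question"],
    template="Answer briefly: {question}",
)
chain = LLMChain(llm=llm, prompt=prompt)

# Metadata passed at call time is attached to that specific traced run,
# alongside any tags, mirroring the agent.run(...) calls in the tests below.
chain.run(
    "what is the meaning of life",
    tags=["a-tag"],
    metadata={"a": "b", "c": "d"},
)
```

The integration tests below exercise the same pattern through agents (sync and async), with the tools loaded via `load_tools`.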
```diff
@@ -181,6 +181,40 @@ def test_tracing_v2_chain_with_tags() -> None:
     chain.run("what is the meaning of life", tags=["a-tag"])
 
 
+def test_tracing_v2_agent_with_metadata() -> None:
+    os.environ["LANGCHAIN_TRACING_V2"] = "true"
+    llm = OpenAI(temperature=0)
+    chat = ChatOpenAI(temperature=0)
+    tools = load_tools(["llm-math", "serpapi"], llm=llm)
+    agent = initialize_agent(
+        tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
+    )
+    chat_agent = initialize_agent(
+        tools, chat, agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION, verbose=True
+    )
+    agent.run(questions[0], tags=["a-tag"], metadata={"a": "b", "c": "d"})
+    chat_agent.run(questions[0], tags=["a-tag"], metadata={"a": "b", "c": "d"})
+
+
+@pytest.mark.asyncio
+async def test_tracing_v2_async_agent_with_metadata() -> None:
+    os.environ["LANGCHAIN_TRACING_V2"] = "true"
+    llm = OpenAI(temperature=0, metadata={"f": "g", "h": "i"})
+    chat = ChatOpenAI(temperature=0, metadata={"f": "g", "h": "i"})
+    async_tools = load_tools(["llm-math", "serpapi"], llm=llm)
+    agent = initialize_agent(
+        async_tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
+    )
+    chat_agent = initialize_agent(
+        async_tools,
+        chat,
+        agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,
+        verbose=True,
+    )
+    await agent.arun(questions[0], tags=["a-tag"], metadata={"a": "b", "c": "d"})
+    await chat_agent.arun(questions[0], tags=["a-tag"], metadata={"a": "b", "c": "d"})
+
+
 def test_trace_as_group() -> None:
     llm = OpenAI(temperature=0.9)
     prompt = PromptTemplate(
```