Compare commits


8 Commits

Author SHA1 Message Date
Bagatur         bb8aa15d19  Merge branch 'master' into add-run-id-in-chain  2024-05-02 13:08:27 -04:00
HoangNguyen689  af5f1fe4ba  Merge branch 'master' into add-run-id-in-chain  2024-05-02 08:21:19 +07:00
HoangNguyen689  298f0363f7  fix lint                                        2024-05-02 08:20:54 +07:00
HoangNguyen689  b949b35948  Merge branch 'master' into add-run-id-in-chain  2024-04-30 13:20:14 +07:00
HoangNguyen689  e4a38cd4a0  Merge branch 'master' into add-run-id-in-chain  2024-04-26 08:49:07 +07:00
HoangNguyen689  2386d31382  Add run_id on async func and stream as well     2024-04-26 08:11:24 +07:00
Bagatur         790936a6a0  Merge branch 'master' into add-run-id-in-chain  2024-04-25 11:33:57 -07:00
HoangNguyen689  133bea1cb9  pass run id from config (Signed-off-by: HoangNguyen689 <ndhoang.bk.hedspi@gmail.com>)  2024-04-25 22:16:09 +07:00
3 changed files with 25 additions and 11 deletions

libs/langchain/langchain/agents/agent.py

@@ -1,4 +1,5 @@
"""Chain that takes in an input and produces an action and action input."""
from __future__ import annotations
import asyncio
@@ -346,11 +347,11 @@ class RunnableAgent(BaseSingleActionAgent):
input_keys_arg: List[str] = []
return_keys_arg: List[str] = []
stream_runnable: bool = True
"""Whether to stream from the runnable or not.
"""Whether to stream from the runnable or not.
If True then underlying LLM is invoked in a streaming fashion to make it possible
to get access to the individual LLM tokens when using stream_log with the Agent
Executor. If False then LLM is invoked in a non-streaming fashion and
If True then underlying LLM is invoked in a streaming fashion to make it possible
to get access to the individual LLM tokens when using stream_log with the Agent
Executor. If False then LLM is invoked in a non-streaming fashion and
individual LLM tokens will not be available in stream_log.
"""
@@ -455,11 +456,11 @@ class RunnableMultiActionAgent(BaseMultiActionAgent):
input_keys_arg: List[str] = []
return_keys_arg: List[str] = []
stream_runnable: bool = True
"""Whether to stream from the runnable or not.
If True then underlying LLM is invoked in a streaming fashion to make it possible
to get access to the individual LLM tokens when using stream_log with the Agent
Executor. If False then LLM is invoked in a non-streaming fashion and
"""Whether to stream from the runnable or not.
If True then underlying LLM is invoked in a streaming fashion to make it possible
to get access to the individual LLM tokens when using stream_log with the Agent
Executor. If False then LLM is invoked in a non-streaming fashion and
individual LLM tokens will not be available in stream_log.
"""
@@ -926,7 +927,7 @@ class AgentExecutor(Chain):
max_iterations: Optional[int] = 15
"""The maximum number of steps to take before ending the execution
loop.
Setting to 'None' could lead to an infinite loop."""
max_execution_time: Optional[float] = None
"""The maximum amount of wall clock time to spend in the execution
@@ -938,7 +939,7 @@ class AgentExecutor(Chain):
`"force"` returns a string saying that it stopped because it met a
time or iteration limit.
`"generate"` calls the agent's LLM Chain one final time to generate
a final answer based on the previous steps.
"""
@@ -1565,6 +1566,7 @@ class AgentExecutor(Chain):
tags=config.get("tags"),
metadata=config.get("metadata"),
run_name=config.get("run_name"),
run_id=config.get("run_id"),
yield_actions=True,
**kwargs,
)
@@ -1586,6 +1588,7 @@ class AgentExecutor(Chain):
tags=config.get("tags"),
metadata=config.get("metadata"),
run_name=config.get("run_name"),
run_id=config.get("run_id"),
yield_actions=True,
**kwargs,
)
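
With these agent.py changes, AgentExecutor.stream and astream forward a caller-supplied run_id from the config dict into the iterator, instead of always letting the callback manager generate one. A minimal sketch of how a caller might use this; the agent_executor instance and the input dict are illustrative assumptions, not part of this diff:

from uuid import uuid4

run_id = uuid4()  # caller-chosen ID, later visible to callbacks/tracing

# `agent_executor` is an already-constructed AgentExecutor (assumed here).
for chunk in agent_executor.stream(
    {"input": "What is 2 + 2?"},
    config={"run_id": run_id},
):
    print(chunk)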

libs/langchain/langchain/agents/agent_iterator.py

@@ -14,6 +14,7 @@ from typing import (
Tuple,
Union,
)
from uuid import UUID
from langchain_core.agents import (
AgentAction,
@@ -54,6 +55,7 @@ class AgentExecutorIterator:
tags: Optional[list[str]] = None,
metadata: Optional[Dict[str, Any]] = None,
run_name: Optional[str] = None,
run_id: Optional[UUID] = None,
include_run_info: bool = False,
yield_actions: bool = False,
):
@@ -67,6 +69,7 @@ class AgentExecutorIterator:
self.tags = tags
self.metadata = metadata
self.run_name = run_name
self.run_id = run_id
self.include_run_info = include_run_info
self.yield_actions = yield_actions
self.reset()
@@ -76,6 +79,7 @@ class AgentExecutorIterator:
tags: Optional[list[str]]
metadata: Optional[Dict[str, Any]]
run_name: Optional[str]
run_id: Optional[UUID]
include_run_info: bool
yield_actions: bool
@@ -162,6 +166,7 @@ class AgentExecutorIterator:
run_manager = callback_manager.on_chain_start(
dumpd(self.agent_executor),
self.inputs,
self.run_id,
name=self.run_name,
)
try:
@@ -227,6 +232,7 @@ class AgentExecutorIterator:
run_manager = await callback_manager.on_chain_start(
dumpd(self.agent_executor),
self.inputs,
self.run_id,
name=self.run_name,
)
try:
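
The new run_id parameter on AgentExecutorIterator is passed straight through to on_chain_start in both the sync and async paths, so the recorded run carries the caller's identifier rather than a freshly generated UUID. A hedged sketch of constructing the iterator directly; the agent_executor and inputs are assumed for illustration:

from uuid import uuid4

from langchain.agents.agent_iterator import AgentExecutorIterator

iterator = AgentExecutorIterator(
    agent_executor,            # an existing AgentExecutor (assumed)
    inputs={"input": "..."},
    run_id=uuid4(),            # forwarded to callback_manager.on_chain_start
    yield_actions=True,
)
for step in iterator:
    print(step)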

libs/langchain/langchain/chains/base.py

@@ -1,4 +1,5 @@
"""Base interface that all chains should implement."""
import inspect
import json
import logging
@@ -127,6 +128,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
tags = config.get("tags")
metadata = config.get("metadata")
run_name = config.get("run_name") or self.get_name()
run_id = config.get("run_id")
include_run_info = kwargs.get("include_run_info", False)
return_only_outputs = kwargs.get("return_only_outputs", False)
@@ -145,6 +147,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
run_manager = callback_manager.on_chain_start(
dumpd(self),
inputs,
run_id,
name=run_name,
)
try:
@@ -178,6 +181,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
tags = config.get("tags")
metadata = config.get("metadata")
run_name = config.get("run_name") or self.get_name()
run_id = config.get("run_id")
include_run_info = kwargs.get("include_run_info", False)
return_only_outputs = kwargs.get("return_only_outputs", False)
@@ -195,6 +199,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
run_manager = await callback_manager.on_chain_start(
dumpd(self),
inputs,
run_id,
name=run_name,
)
try:
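
The base.py change gives every Chain subclass the same behavior through invoke and ainvoke: config["run_id"] is read and handed to on_chain_start. One way to observe the effect, sketched with an assumed chain instance and a minimal callback handler:

from uuid import uuid4

from langchain_core.callbacks import BaseCallbackHandler


class PrintRunId(BaseCallbackHandler):
    def on_chain_start(self, serialized, inputs, *, run_id, **kwargs):
        # With this PR, run_id here equals the UUID passed in the config.
        print("chain started with run_id:", run_id)


my_run_id = uuid4()
# `chain` is any Chain subclass instance (assumed for illustration).
chain.invoke(
    {"input": "hi"},
    config={"run_id": my_run_id, "callbacks": [PrintRunId()]},
)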