add `ls_integration` metadata to LangSmith trace params

This commit is contained in:
Tanushree Sharma
2026-03-12 17:12:55 -07:00
parent cee6430b1c
commit 326335e649
4 changed files with 24 additions and 11 deletions

View File

@@ -69,6 +69,8 @@ class LangSmithParams(TypedDict, total=False):
ls_stop: list[str] | None
"""Stop words for generation."""
ls_integration: str
"""Integration that created the trace."""
@cache # Cache the tokenizer
@@ -299,6 +301,16 @@ class BaseLanguageModel(
# generate responses that match a given schema.
raise NotImplementedError
def _get_ls_params_with_defaults(
self,
stop: list[str] | None = None,
**kwargs: Any,
) -> LangSmithParams:
"""Wrap _get_ls_params to always include ls_integration."""
ls_params = self._get_ls_params(stop=stop, **kwargs)
ls_params["ls_integration"] = "langchain"
return ls_params
@property
def _identifying_params(self) -> Mapping[str, Any]:
"""Get the identifying parameters."""

View File

@@ -505,7 +505,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
options = {"stop": stop, **kwargs, **ls_structured_output_format_dict}
inheritable_metadata = {
**(config.get("metadata") or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
callback_manager = CallbackManager.configure(
config.get("callbacks"),
@@ -633,7 +633,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
options = {"stop": stop, **kwargs, **ls_structured_output_format_dict}
inheritable_metadata = {
**(config.get("metadata") or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
callback_manager = AsyncCallbackManager.configure(
config.get("callbacks"),
@@ -899,7 +899,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
options = {"stop": stop, **ls_structured_output_format_dict}
inheritable_metadata = {
**(metadata or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
callback_manager = CallbackManager.configure(
@@ -1022,7 +1022,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
options = {"stop": stop, **ls_structured_output_format_dict}
inheritable_metadata = {
**(metadata or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
callback_manager = AsyncCallbackManager.configure(

View File

@@ -527,7 +527,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
options = {"stop": stop}
inheritable_metadata = {
**(config.get("metadata") or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
callback_manager = CallbackManager.configure(
config.get("callbacks"),
@@ -597,7 +597,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
options = {"stop": stop}
inheritable_metadata = {
**(config.get("metadata") or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
callback_manager = AsyncCallbackManager.configure(
config.get("callbacks"),
@@ -906,14 +906,14 @@ class BaseLLM(BaseLanguageModel[str], ABC):
metadata = [
{
**(meta or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
for meta in metadata
]
elif isinstance(metadata, dict):
metadata = {
**(metadata or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
if (
isinstance(callbacks, list)
@@ -1173,14 +1173,14 @@ class BaseLLM(BaseLanguageModel[str], ABC):
metadata = [
{
**(meta or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
for meta in metadata
]
elif isinstance(metadata, dict):
metadata = {
**(metadata or {}),
**self._get_ls_params(stop=stop, **kwargs),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
# Create callback managers
if isinstance(callbacks, list) and (

View File

@@ -1630,8 +1630,9 @@ def create_agent(
)
config: RunnableConfig = {"recursion_limit": 10_000}
config["metadata"] = {"ls_integration": "langchain"}
if name:
config["metadata"] = {"lc_agent_name": name}
config["metadata"]["lc_agent_name"] = name
return graph.compile(
checkpointer=checkpointer,