Mirror of https://github.com/hwchase17/langchain.git, synced 2025-09-23 19:39:58 +00:00
core: Assign missing message ids in BaseChatModel (#19863)
- This ensures ids are stable across streamed chunks.
- Multiple messages in a batch call get separate ids.
- Also fixes ids being dropped when combining message chunks.
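To illustrate the chunk-combining fix, here is a minimal sketch (not part of this commit's diff); the chunk contents and the `run-abc123` id are placeholders, and it assumes a `langchain_core` version that includes this change:

```python
from langchain_core.messages import AIMessageChunk

# Two chunks as they might arrive while streaming a chat model response.
# "run-abc123" is a placeholder id, not a real run id.
first = AIMessageChunk(content="Hello", id="run-abc123")
second = AIMessageChunk(content=", world")

# Adding chunks merges their content; with this fix the id set on the
# first chunk is carried over instead of being dropped.
combined = first + second
print(combined.content)  # Hello, world
print(combined.id)       # run-abc123
```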
@@ -116,7 +116,9 @@ def node_data_str(node: Node) -> str:
     return data if not data.startswith("Runnable") else data[8:]
 
 
-def node_data_json(node: Node) -> Dict[str, Union[str, Dict[str, Any]]]:
+def node_data_json(
+    node: Node, *, with_schemas: bool = False
+) -> Dict[str, Union[str, Dict[str, Any]]]:
     from langchain_core.load.serializable import to_json_not_implemented
     from langchain_core.runnables.base import Runnable, RunnableSerializable
 
@@ -137,10 +139,17 @@ def node_data_json(node: Node) -> Dict[str, Union[str, Dict[str, Any]]]:
             },
         }
     elif inspect.isclass(node.data) and issubclass(node.data, BaseModel):
-        return {
-            "type": "schema",
-            "data": node.data.schema(),
-        }
+        return (
+            {
+                "type": "schema",
+                "data": node.data.schema(),
+            }
+            if with_schemas
+            else {
+                "type": "schema",
+                "data": node_data_str(node),
+            }
+        )
     else:
         return {
             "type": "unknown",
@@ -156,7 +165,7 @@ class Graph:
     edges: List[Edge] = field(default_factory=list)
     branches: Optional[Dict[str, List[Branch]]] = field(default_factory=dict)
 
-    def to_json(self) -> Dict[str, List[Dict[str, Any]]]:
+    def to_json(self, *, with_schemas: bool = False) -> Dict[str, List[Dict[str, Any]]]:
         """Convert the graph to a JSON-serializable format."""
         stable_node_ids = {
             node.id: i if is_uuid(node.id) else node.id
@@ -165,7 +174,10 @@ class Graph:
 
         return {
             "nodes": [
-                {"id": stable_node_ids[node.id], **node_data_json(node)}
+                {
+                    "id": stable_node_ids[node.id],
+                    **node_data_json(node, with_schemas=with_schemas),
+                }
                 for node in self.nodes.values()
             ],
             "edges": [
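For context, a minimal usage sketch of the new keyword-only flag (the `RunnableLambda` here is just an illustrative stand-in for a real chain; it assumes a `langchain_core` version that includes this change):

```python
from langchain_core.runnables import RunnableLambda

# Any Runnable exposes its graph via get_graph().
graph = RunnableLambda(lambda x: x + 1).get_graph()

# Default behavior is unchanged: schema nodes are rendered as short strings.
print(graph.to_json())

# With with_schemas=True, schema nodes are serialized as full JSON schemas
# (via BaseModel.schema()) instead of their string names.
print(graph.to_json(with_schemas=True))
```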