core[patch], community[patch]: mark runnable context, lc load as beta (#15603)

Author: Bagatur
Date: 2024-01-05 17:54:26 -05:00
Committed by: GitHub
Parent: 75281af822
Commit: a7d023aaf0
11 changed files with 70 additions and 16 deletions


@@ -1,7 +1,15 @@
 from importlib import metadata
 
+from langchain_core._api import (
+    surface_langchain_beta_warnings,
+    surface_langchain_deprecation_warnings,
+)
+
 try:
     __version__ = metadata.version(__package__)
 except metadata.PackageNotFoundError:
     # Case where package metadata is not available.
     __version__ = ""
+
+surface_langchain_deprecation_warnings()
+surface_langchain_beta_warnings()
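For context, the two calls added above are what make beta and deprecation warnings visible as soon as langchain_core is imported. Below is a minimal, self-contained sketch of how such a surface/suppress pair can be built on the standard warnings module; the LangChainBetaWarning stand-in and the exact filter actions are assumptions for illustration, not a copy of langchain_core._api.beta_decorator.

import contextlib
import warnings


class LangChainBetaWarning(DeprecationWarning):
    """Stand-in for the real warning class (assumed to subclass DeprecationWarning)."""


def surface_langchain_beta_warnings() -> None:
    # DeprecationWarning subclasses are hidden by default outside __main__;
    # the "default" action makes the first occurrence per location visible.
    warnings.filterwarnings("default", category=LangChainBetaWarning)


@contextlib.contextmanager
def suppress_langchain_beta_warning():
    # Temporarily ignore beta warnings, restoring the previous filters on exit.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", LangChainBetaWarning)
        yield


surface_langchain_beta_warnings()
warnings.warn("beta API used", LangChainBetaWarning)      # visible
with suppress_langchain_beta_warning():
    warnings.warn("beta API used", LangChainBetaWarning)  # silenced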


@@ -8,7 +8,12 @@ This module is only relevant for LangChain developers, not for users.
 in your own code. We may change the API at any time with no warning.
 """
+from .beta_decorator import (
+    LangChainBetaWarning,
+    beta,
+    suppress_langchain_beta_warning,
+    surface_langchain_beta_warnings,
+)
 from .deprecation import (
     LangChainDeprecationWarning,
     deprecated,
@@ -20,9 +25,13 @@ from .path import as_import_path, get_relative_path
 __all__ = [
     "as_import_path",
+    "beta",
     "deprecated",
     "get_relative_path",
+    "LangChainBetaWarning",
     "LangChainDeprecationWarning",
+    "suppress_langchain_beta_warning",
+    "surface_langchain_beta_warnings",
     "suppress_langchain_deprecation_warning",
     "surface_langchain_deprecation_warnings",
     "warn_deprecated",
 ]


@@ -18,6 +18,7 @@ from typing import (
     Union,
 )
 
+from langchain_core._api.beta_decorator import beta
 from langchain_core.runnables.base import (
     Runnable,
     RunnableSerializable,
@@ -156,6 +157,7 @@ def config_with_context(
     return _config_with_context(config, steps, _setter, _getter, threading.Event)
 
 
+@beta()
 class ContextGet(RunnableSerializable):
     """Get a context value."""
@@ -219,6 +221,7 @@ def _coerce_set_value(value: SetValue) -> Runnable[Input, Output]:
     return coerce_to_runnable(value)
 
 
+@beta()
 class ContextSet(RunnableSerializable):
     """Set a context value."""


@@ -3,6 +3,7 @@ import json
 import os
 from typing import Any, Dict, List, Optional
 
+from langchain_core._api import beta, suppress_langchain_beta_warning
 from langchain_core.load.mapping import (
     OLD_PROMPT_TEMPLATE_FORMATS,
     SERIALIZABLE_MAPPING,
@@ -102,6 +103,7 @@ class Reviver:
         return value
 
 
+@beta()
 def loads(
     text: str,
     *,
@@ -123,6 +125,17 @@ def loads(
     return json.loads(text, object_hook=Reviver(secrets_map, valid_namespaces))
 
 
+def _loads_suppress_warning(
+    text: str,
+    *,
+    secrets_map: Optional[Dict[str, str]] = None,
+    valid_namespaces: Optional[List[str]] = None,
+) -> Any:
+    with suppress_langchain_beta_warning():
+        return loads(text, secrets_map=secrets_map, valid_namespaces=valid_namespaces)
+
+
+@beta()
 def load(
     obj: Any,
     *,
@@ -153,3 +166,13 @@ def load(
         return obj
 
     return _load(obj)
+
+
+def _load_suppress_warning(
+    obj: Any,
+    *,
+    secrets_map: Optional[Dict[str, str]] = None,
+    valid_namespaces: Optional[List[str]] = None,
+) -> Any:
+    with suppress_langchain_beta_warning():
+        return load(obj, secrets_map=secrets_map, valid_namespaces=valid_namespaces)
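The split above gives loads()/load() a beta warning at the public entry points, while the private _loads_suppress_warning/_load_suppress_warning wrappers let internal callers deserialize without emitting it. A sketch of the caller-facing side, assuming a langchain-core build that contains this change; the prompt is just an arbitrary serializable object used for the round trip.

import warnings

from langchain_core.load import dumpd, load
from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_messages([("human", "{question}")])
serialized = dumpd(prompt)  # plain, JSON-able representation

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    restored = load(serialized)  # public entry point, now decorated with @beta()

print(type(restored).__name__)                 # ChatPromptTemplate
print([w.category.__name__ for w in caught])   # expect a LangChainBetaWarning here

The private wrappers exist so that hot internal paths (tracers, message history), which call load on every run, do not repeat this warning; the remaining hunks switch those callers over.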


@@ -14,7 +14,7 @@ from typing import (
 )
 
 from langchain_core.chat_history import BaseChatMessageHistory
-from langchain_core.load import load
+from langchain_core.load.load import _load_suppress_warning
 from langchain_core.pydantic_v1 import BaseModel, create_model
 from langchain_core.runnables.base import Runnable, RunnableBindingBase, RunnableLambda
 from langchain_core.runnables.config import run_in_executor
@@ -337,7 +337,7 @@ class RunnableWithMessageHistory(RunnableBindingBase):
         hist = config["configurable"]["message_history"]
 
         # Get the input messages
-        inputs = load(run.inputs)
+        inputs = _load_suppress_warning(run.inputs)
         input_val = inputs[self.input_messages_key or "input"]
         input_messages = self._get_input_messages(input_val)
@@ -348,7 +348,7 @@ class RunnableWithMessageHistory(RunnableBindingBase):
             input_messages = input_messages[len(historic_messages) :]
 
         # Get the output messages
-        output_val = load(run.outputs)
+        output_val = _load_suppress_warning(run.outputs)
         output_messages = self._get_output_messages(output_val)
         for m in input_messages + output_messages:
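The history runnable deserializes run inputs and outputs on every traced invocation, which is why it switches to the suppressing wrapper instead of warning each time. User code that wants the same behaviour can reach for the exported context manager; a small sketch, assuming suppress_langchain_beta_warning is importable from langchain_core._api as the __all__ change above indicates.

from langchain_core._api import suppress_langchain_beta_warning
from langchain_core.load import dumpd, load
from langchain_core.messages import HumanMessage

payload = dumpd(HumanMessage(content="hi"))

# Same pattern the private _load_suppress_warning wrapper uses internally:
# deserialize without surfacing the beta warning on every call.
with suppress_langchain_beta_warning():
    message = load(payload)

assert isinstance(message, HumanMessage)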


@@ -19,7 +19,7 @@ from uuid import UUID
 
 import jsonpatch  # type: ignore[import]
 from anyio import create_memory_object_stream
-from langchain_core.load import load
+from langchain_core.load.load import _load_suppress_warning
 from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
 from langchain_core.tracers.base import BaseTracer
 from langchain_core.tracers.schemas import Run
@@ -267,7 +267,7 @@ class LogStreamCallbackHandler(BaseTracer):
                     "op": "add",
                     "path": f"/logs/{index}/final_output",
                     # to undo the dumpd done by some runnables / tracer / etc
-                    "value": load(run.outputs),
+                    "value": _load_suppress_warning(run.outputs),
                 },
                 {
                     "op": "add",


@@ -3,7 +3,7 @@ from typing import Any, Dict
 
 import pytest
 
-from langchain_core._api.beta import beta, warn_beta
+from langchain_core._api.beta_decorator import beta, warn_beta
 from langchain_core.pydantic_v1 import BaseModel


@@ -1,8 +1,12 @@
 from langchain_core._api import __all__
 
 EXPECTED_ALL = [
+    "beta",
     "deprecated",
+    "LangChainBetaWarning",
     "LangChainDeprecationWarning",
+    "suppress_langchain_beta_warning",
+    "surface_langchain_beta_warnings",
     "suppress_langchain_deprecation_warning",
     "surface_langchain_deprecation_warnings",
     "warn_deprecated",