(all): update removal in deprecation warnings from 0.2 to 0.3 (#21265)

We are pushing out the removal of these to 0.3.

`find . -type f -name "*.py" -exec sed -i '' 's/removal="0\.2/removal="0.3/g' {} +`
This commit is contained in:
ccurme
2024-05-03 14:29:36 -04:00
committed by GitHub
parent d6e34f9ee5
commit 6da3d92b42
81 changed files with 127 additions and 127 deletions

View File

@@ -207,7 +207,7 @@ class BaseLanguageModel(
"""Implement this if there is a way of steering the model to generate responses that match a given schema.""" # noqa: E501
raise NotImplementedError()
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
@abstractmethod
def predict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
@@ -228,7 +228,7 @@ class BaseLanguageModel(
Top model prediction as a string.
"""
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
@abstractmethod
def predict_messages(
self,
@@ -253,7 +253,7 @@ class BaseLanguageModel(
Top model prediction as a message.
"""
@deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
@deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
@abstractmethod
async def apredict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
@@ -274,7 +274,7 @@ class BaseLanguageModel(
Top model prediction as a string.
"""
@deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
@deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
@abstractmethod
async def apredict_messages(
self,

View File

@@ -797,7 +797,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
break
yield item # type: ignore[misc]
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
def __call__(
self,
messages: List[BaseMessage],
@@ -829,13 +829,13 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
else:
raise ValueError("Unexpected generation type")
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
def call_as_llm(
self, message: str, stop: Optional[List[str]] = None, **kwargs: Any
) -> str:
return self.predict(message, stop=stop, **kwargs)
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
def predict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
) -> str:
@@ -849,7 +849,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
else:
raise ValueError("Cannot use predict when output is not a string.")
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
def predict_messages(
self,
messages: List[BaseMessage],
@@ -863,7 +863,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
_stop = list(stop)
return self(messages, stop=_stop, **kwargs)
@deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
@deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
async def apredict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
) -> str:
@@ -879,7 +879,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
else:
raise ValueError("Cannot use predict when output is not a string.")
@deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
@deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
async def apredict_messages(
self,
messages: List[BaseMessage],

View File

@@ -1064,7 +1064,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
generations = [existing_prompts[i] for i in range(len(prompts))]
return LLMResult(generations=generations, llm_output=llm_output, run=run_info)
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
def __call__(
self,
prompt: str,
@@ -1116,7 +1116,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
)
return result.generations[0][0].text
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
def predict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
) -> str:
@@ -1126,7 +1126,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
_stop = list(stop)
return self(text, stop=_stop, **kwargs)
@deprecated("0.1.7", alternative="invoke", removal="0.2.0")
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
def predict_messages(
self,
messages: List[BaseMessage],
@@ -1142,7 +1142,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
content = self(text, stop=_stop, **kwargs)
return AIMessage(content=content)
@deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
@deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
async def apredict(
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
) -> str:
@@ -1152,7 +1152,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
_stop = list(stop)
return await self._call_async(text, stop=_stop, **kwargs)
@deprecated("0.1.7", alternative="ainvoke", removal="0.2.0")
@deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
async def apredict_messages(
self,
messages: List[BaseMessage],

View File

@@ -14,7 +14,7 @@ from langchain_core.outputs import LLMResult
from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
@deprecated("0.1.0", alternative="Use string instead.", removal="0.2.0")
@deprecated("0.1.0", alternative="Use string instead.", removal="0.3.0")
def RunTypeEnum() -> Type[RunTypeEnumDep]:
"""RunTypeEnum."""
warnings.warn(
@@ -25,7 +25,7 @@ def RunTypeEnum() -> Type[RunTypeEnumDep]:
return RunTypeEnumDep
@deprecated("0.1.0", removal="0.2.0")
@deprecated("0.1.0", removal="0.3.0")
class TracerSessionV1Base(BaseModel):
"""Base class for TracerSessionV1."""
@@ -34,33 +34,33 @@ class TracerSessionV1Base(BaseModel):
extra: Optional[Dict[str, Any]] = None
@deprecated("0.1.0", removal="0.2.0")
@deprecated("0.1.0", removal="0.3.0")
class TracerSessionV1Create(TracerSessionV1Base):
"""Create class for TracerSessionV1."""
@deprecated("0.1.0", removal="0.2.0")
@deprecated("0.1.0", removal="0.3.0")
class TracerSessionV1(TracerSessionV1Base):
"""TracerSessionV1 schema."""
id: int
@deprecated("0.1.0", removal="0.2.0")
@deprecated("0.1.0", removal="0.3.0")
class TracerSessionBase(TracerSessionV1Base):
"""Base class for TracerSession."""
tenant_id: UUID
@deprecated("0.1.0", removal="0.2.0")
@deprecated("0.1.0", removal="0.3.0")
class TracerSession(TracerSessionBase):
"""TracerSessionV1 schema for the V2 API."""
id: UUID
@deprecated("0.1.0", alternative="Run", removal="0.2.0")
@deprecated("0.1.0", alternative="Run", removal="0.3.0")
class BaseRun(BaseModel):
"""Base class for Run."""
@@ -76,7 +76,7 @@ class BaseRun(BaseModel):
error: Optional[str] = None
@deprecated("0.1.0", alternative="Run", removal="0.2.0")
@deprecated("0.1.0", alternative="Run", removal="0.3.0")
class LLMRun(BaseRun):
"""Class for LLMRun."""
@@ -84,7 +84,7 @@ class LLMRun(BaseRun):
response: Optional[LLMResult] = None
@deprecated("0.1.0", alternative="Run", removal="0.2.0")
@deprecated("0.1.0", alternative="Run", removal="0.3.0")
class ChainRun(BaseRun):
"""Class for ChainRun."""
@@ -95,7 +95,7 @@ class ChainRun(BaseRun):
child_tool_runs: List[ToolRun] = Field(default_factory=list)
@deprecated("0.1.0", alternative="Run", removal="0.2.0")
@deprecated("0.1.0", alternative="Run", removal="0.3.0")
class ToolRun(BaseRun):
"""Class for ToolRun."""

View File

@@ -78,7 +78,7 @@ def _rm_titles(kv: dict, prev_key: str = "") -> dict:
@deprecated(
"0.1.16",
alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
removal="0.2.0",
removal="0.3.0",
)
def convert_pydantic_to_openai_function(
model: Type[BaseModel],
@@ -102,7 +102,7 @@ def convert_pydantic_to_openai_function(
@deprecated(
"0.1.16",
alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",
removal="0.2.0",
removal="0.3.0",
)
def convert_pydantic_to_openai_tool(
model: Type[BaseModel],
@@ -213,7 +213,7 @@ def _get_python_function_required_args(function: Callable) -> List[str]:
@deprecated(
"0.1.16",
alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
removal="0.2.0",
removal="0.3.0",
)
def convert_python_function_to_openai_function(
function: Callable,
@@ -239,7 +239,7 @@ def convert_python_function_to_openai_function(
@deprecated(
"0.1.16",
alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
removal="0.2.0",
removal="0.3.0",
)
def format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
"""Format tool into the OpenAI function API."""
@@ -269,7 +269,7 @@ def format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
@deprecated(
"0.1.16",
alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",
removal="0.2.0",
removal="0.3.0",
)
def format_tool_to_openai_tool(tool: BaseTool) -> ToolDescription:
"""Format tool into the OpenAI function API."""

View File

@@ -8,7 +8,7 @@ from langchain_core._api.deprecation import deprecated
@deprecated(
since="0.1.30",
removal="0.2",
removal="0.3",
message=(
"Using the hwchase17/langchain-hub "
"repo for prompts is deprecated. Please use "