mirror of https://github.com/hwchase17/langchain.git
synced 2025-06-28 01:19:31 +00:00

multiple: update removal targets (#25361)

parent 4029f5650c
commit 27690506d0
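Every hunk below makes the same kind of one-line change: the `removal` target passed to the `deprecated` decorator or the `warn_deprecated` helper from `langchain_core._api` is bumped from a 0.3/0.4 milestone to "1.0". A minimal sketch of that pattern, reusing only argument values that appear in the hunks; the decorated class below is a placeholder, not the real implementation:

# Sketch of the deprecation pattern this commit retargets; only the decorator
# and helper arguments mirror the diff below; the class itself is a stand-in.
from langchain_core._api import deprecated, warn_deprecated


@deprecated(
    since="0.0.28",  # release that deprecated the symbol
    removal="1.0",  # release in which it will be removed (the value bumped here)
    alternative_import="langchain_astradb.AstraDBCache",
)
class AstraDBCache:
    """Instantiating this stand-in class now warns until its removal in 1.0."""


def _use_legacy_argument() -> None:
    # Imperative form used where a decorator does not fit, e.g. a deprecated
    # keyword argument such as `connection_string` in SQLChatMessageHistory.
    warn_deprecated(
        since="0.2.2",
        removal="1.0",
        name="connection_string",
        alternative="connection",
    )

With the decorator in place, constructing the class (or passing the deprecated argument) emits a deprecation warning that names the alternative and the 1.0 removal target.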
@@ -1614,7 +1614,7 @@ ASTRA_DB_CACHE_DEFAULT_COLLECTION_NAME = "langchain_astradb_cache"

 @deprecated(
     since="0.0.28",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_astradb.AstraDBCache",
 )
 class AstraDBCache(BaseCache):
@@ -1819,7 +1819,7 @@ def _async_lru_cache(maxsize: int = 128, typed: bool = False) -> Callable:

 @deprecated(
     since="0.0.28",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_astradb.AstraDBSemanticCache",
 )
 class AstraDBSemanticCache(BaseCache):
@@ -65,7 +65,7 @@ def _get_message_data(service: Any, message: Any) -> ChatSession:

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GMailLoader",
 )
 class GMailLoader(BaseChatLoader):
@@ -27,7 +27,7 @@ DEFAULT_COLLECTION_NAME = "langchain_message_store"

 @deprecated(
     since="0.0.25",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_astradb.AstraDBChatMessageHistory",
 )
 class AstraDBChatMessageHistory(BaseChatMessageHistory):
@@ -18,7 +18,7 @@ DEFAULT_COLLECTION_NAME = "message_store"

 @deprecated(
     since="0.0.25",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_mongodb.MongoDBChatMessageHistory",
 )
 class MongoDBChatMessageHistory(BaseChatMessageHistory):
@@ -147,7 +147,7 @@ class SQLChatMessageHistory(BaseChatMessageHistory):
     """

     @property
-    @deprecated("0.2.2", removal="0.3.0", alternative="session_maker")
+    @deprecated("0.2.2", removal="1.0", alternative="session_maker")
     def Session(self) -> Union[scoped_session, async_sessionmaker]:
         return self.session_maker

@@ -185,7 +185,7 @@ class SQLChatMessageHistory(BaseChatMessageHistory):
         if not _warned_once_already:
             warn_deprecated(
                 since="0.2.2",
-                removal="0.3.0",
+                removal="1.0",
                 name="connection_string",
                 alternative="connection",
             )
@@ -73,7 +73,7 @@ def convert_messages_to_prompt_anthropic(

 @deprecated(
     since="0.0.28",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_anthropic.ChatAnthropic",
 )
 class ChatAnthropic(BaseChatModel, _AnthropicCommon):
@@ -20,7 +20,7 @@ logger = logging.getLogger(__name__)

 @deprecated(
     since="0.0.10",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_openai.AzureChatOpenAI",
 )
 class AzureChatOpenAI(ChatOpenAI):
@@ -202,7 +202,7 @@ _message_type_lookups = {


 @deprecated(
-    since="0.0.34", removal="0.3", alternative_import="langchain_aws.ChatBedrock"
+    since="0.0.34", removal="1.0", alternative_import="langchain_aws.ChatBedrock"
 )
 class BedrockChat(BaseChatModel, BedrockBase):
     """Chat model that uses the Bedrock API."""
@@ -96,7 +96,7 @@ def get_cohere_chat_request(


 @deprecated(
-    since="0.0.30", removal="0.3.0", alternative_import="langchain_cohere.ChatCohere"
+    since="0.0.30", removal="1.0", alternative_import="langchain_cohere.ChatCohere"
 )
 class ChatCohere(BaseChatModel, BaseCohere):
     """`Cohere` chat large language models.
@@ -81,7 +81,7 @@ def convert_dict_to_message(_dict: Any) -> BaseMessage:

 @deprecated(
     since="0.0.26",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_fireworks.ChatFireworks",
 )
 class ChatFireworks(BaseChatModel):
@@ -38,7 +38,7 @@ DEFAULT_SYSTEM_PROMPT = """You are a helpful, respectful, and honest assistant."

 @deprecated(
     since="0.0.37",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_huggingface.ChatHuggingFace",
 )
 class ChatHuggingFace(BaseChatModel):
@@ -147,7 +147,7 @@ def _convert_delta_to_message_chunk(


 @deprecated(
-    since="0.0.10", removal="0.3.0", alternative_import="langchain_openai.ChatOpenAI"
+    since="0.0.10", removal="1.0", alternative_import="langchain_openai.ChatOpenAI"
 )
 class ChatOpenAI(BaseChatModel):
     """`OpenAI` Chat large language models API.
@@ -11,7 +11,7 @@ from langchain_community.llms.solar import SOLAR_SERVICE_URL_BASE, SolarCommon


 @deprecated( # type: ignore[arg-type]
-    since="0.0.34", removal="0.3.0", alternative_import="langchain_upstage.ChatUpstage"
+    since="0.0.34", removal="1.0", alternative_import="langchain_upstage.ChatUpstage"
 )
 class SolarChat(SolarCommon, ChatOpenAI):
     """Wrapper around Solar large language models.
@@ -206,7 +206,7 @@ def _get_question(messages: List[BaseMessage]) -> HumanMessage:

 @deprecated(
     since="0.0.12",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_vertexai.ChatVertexAI",
 )
 class ChatVertexAI(_VertexAICommon, BaseChatModel):
@@ -27,7 +27,7 @@ logger = logging.getLogger(__name__)

 @deprecated(
     since="0.0.29",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_astradb.AstraDBLoader",
 )
 class AstraDBLoader(BaseLoader):
@@ -14,7 +14,7 @@ if TYPE_CHECKING:

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.BigQueryLoader",
 )
 class BigQueryLoader(BaseLoader):
@@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)

 @deprecated(
     since="0.0.24",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="docugami_langchain.DocugamiLoader",
 )
 class DocugamiLoader(BaseLoader, BaseModel):
@@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GCSDirectoryLoader",
 )
 class GCSDirectoryLoader(BaseLoader):
@@ -12,7 +12,7 @@ from langchain_community.utilities.vertexai import get_client_info

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GCSFileLoader",
 )
 class GCSFileLoader(BaseLoader):
@@ -15,7 +15,7 @@ if TYPE_CHECKING:

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.SpeechToTextLoader",
 )
 class GoogleSpeechToTextLoader(BaseLoader):
@@ -21,7 +21,7 @@ SCOPES = ["https://www.googleapis.com/auth/drive.readonly"]

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GoogleDriveLoader",
 )
 class GoogleDriveLoader(BaseLoader, BaseModel):
@@ -37,7 +37,7 @@ class DocAIParsingResults:

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.DocAIParser",
 )
 class DocAIParser(BaseBlobParser):
@@ -168,7 +168,7 @@ class UnstructuredBaseLoader(BaseLoader, ABC):

 @deprecated(
     since="0.2.8",
-    removal="0.4.0",
+    removal="1.0",
     alternative_import="langchain_unstructured.UnstructuredLoader",
 )
 class UnstructuredFileLoader(UnstructuredBaseLoader):
@@ -269,7 +269,7 @@ def get_elements_from_api(

 @deprecated(
     since="0.2.8",
-    removal="0.4.0",
+    removal="1.0",
     alternative_import="langchain_unstructured.UnstructuredLoader",
 )
 class UnstructuredAPIFileLoader(UnstructuredBaseLoader):
@@ -351,7 +351,7 @@ class UnstructuredAPIFileLoader(UnstructuredBaseLoader):

 @deprecated(
     since="0.2.8",
-    removal="0.4.0",
+    removal="1.0",
     alternative_import="langchain_unstructured.UnstructuredLoader",
 )
 class UnstructuredFileIOLoader(UnstructuredBaseLoader):
@@ -418,7 +418,7 @@ class UnstructuredFileIOLoader(UnstructuredBaseLoader):

 @deprecated(
     since="0.2.8",
-    removal="0.4.0",
+    removal="1.0",
     alternative_import="langchain_unstructured.UnstructuredLoader",
 )
 class UnstructuredAPIFileIOLoader(UnstructuredBaseLoader):
@@ -8,7 +8,7 @@ from langchain_community.utilities.vertexai import get_client_info

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.DocAIParser",
 )
 class GoogleTranslateTransformer(BaseDocumentTransformer):
@@ -16,7 +16,7 @@ from langchain_community.utils.openai import is_openai_v1

 @deprecated(
     since="0.0.9",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_openai.AzureOpenAIEmbeddings",
 )
 class AzureOpenAIEmbeddings(OpenAIEmbeddings):
@@ -12,7 +12,7 @@ from langchain_core.runnables.config import run_in_executor

 @deprecated(
     since="0.2.11",
-    removal="0.4.0",
+    removal="1.0",
     alternative_import="langchain_aws.BedrockEmbeddings",
 )
 class BedrockEmbeddings(BaseModel, Embeddings):
@@ -10,7 +10,7 @@ from langchain_community.llms.cohere import _create_retry_decorator

 @deprecated(
     since="0.0.30",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_cohere.CohereEmbeddings",
 )
 class CohereEmbeddings(BaseModel, Embeddings):
@@ -21,7 +21,7 @@ DEFAULT_QUERY_BGE_INSTRUCTION_ZH = "为这个句子生成表示以用于检索

 @deprecated(
     since="0.2.2",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_huggingface.HuggingFaceEmbeddings",
 )
 class HuggingFaceEmbeddings(BaseModel, Embeddings):
@@ -171,7 +171,7 @@ class HuggingFaceInstructEmbeddings(BaseModel, Embeddings):
         if "show_progress_bar" in self.encode_kwargs:
             warn_deprecated(
                 since="0.2.5",
-                removal="0.4.0",
+                removal="1.0",
                 name="encode_kwargs['show_progress_bar']",
                 alternative=f"the show_progress method on {self.__class__.__name__}",
             )
@@ -298,7 +298,7 @@ class HuggingFaceBgeEmbeddings(BaseModel, Embeddings):
         if "show_progress_bar" in self.encode_kwargs:
             warn_deprecated(
                 since="0.2.5",
-                removal="0.4.0",
+                removal="1.0",
                 name="encode_kwargs['show_progress_bar']",
                 alternative=f"the show_progress method on {self.__class__.__name__}",
             )
@@ -12,7 +12,7 @@ VALID_TASKS = ("feature-extraction",)

 @deprecated(
     since="0.2.2",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_huggingface.HuggingFaceEndpointEmbeddings",
 )
 class HuggingFaceHubEmbeddings(BaseModel, Embeddings):
@@ -144,7 +144,7 @@ async def async_embed_with_retry(embeddings: OpenAIEmbeddings, **kwargs: Any) ->

 @deprecated(
     since="0.0.9",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_openai.OpenAIEmbeddings",
 )
 class OpenAIEmbeddings(BaseModel, Embeddings):
@@ -46,7 +46,7 @@ def embed_with_retry(embeddings: SolarEmbeddings, *args: Any, **kwargs: Any) ->


 @deprecated(
-    since="0.0.34", removal="0.3.0", alternative_import="langchain_upstage.ChatUpstage"
+    since="0.0.34", removal="1.0", alternative_import="langchain_upstage.ChatUpstage"
 )
 class SolarEmbeddings(BaseModel, Embeddings):
     """Solar's embedding service.
@@ -22,7 +22,7 @@ _MIN_BATCH_SIZE = 5

 @deprecated(
     since="0.0.12",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_vertexai.VertexAIEmbeddings",
 )
 class VertexAIEmbeddings(_VertexAICommon, Embeddings):
@@ -61,7 +61,7 @@ def embed_with_retry(embeddings: VoyageEmbeddings, **kwargs: Any) -> Any:

 @deprecated(
     since="0.0.29",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_voyageai.VoyageAIEmbeddings",
 )
 class VoyageEmbeddings(BaseModel, Embeddings):
@@ -172,7 +172,7 @@ def _import_databricks() -> Type[BaseLLM]:
 def _import_databricks_chat() -> Any:
     warn_deprecated(
         since="0.0.22",
-        removal="0.3",
+        removal="1.0",
         alternative_import="langchain_community.chat_models.ChatDatabricks",
     )
     from langchain_community.chat_models.databricks import ChatDatabricks
@@ -342,7 +342,7 @@ def _import_mlflow() -> Type[BaseLLM]:
 def _import_mlflow_chat() -> Any:
     warn_deprecated(
         since="0.0.22",
-        removal="0.3",
+        removal="1.0",
         alternative_import="langchain_community.chat_models.ChatMlflow",
     )
     from langchain_community.chat_models.mlflow import ChatMlflow
@@ -151,7 +151,7 @@ class _AnthropicCommon(BaseLanguageModel):

 @deprecated(
     since="0.0.28",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_anthropic.AnthropicLLM",
 )
 class Anthropic(LLM, _AnthropicCommon):
@@ -713,7 +713,7 @@ class BedrockBase(BaseModel, ABC):


 @deprecated(
-    since="0.0.34", removal="0.3", alternative_import="langchain_aws.BedrockLLM"
+    since="0.0.34", removal="1.0", alternative_import="langchain_aws.BedrockLLM"
 )
 class Bedrock(LLM, BedrockBase):
     """Bedrock models.
@@ -71,7 +71,7 @@ def acompletion_with_retry(llm: Cohere, **kwargs: Any) -> Any:


 @deprecated(
-    since="0.0.30", removal="0.3.0", alternative_import="langchain_cohere.BaseCohere"
+    since="0.0.30", removal="1.0", alternative_import="langchain_cohere.BaseCohere"
 )
 class BaseCohere(Serializable):
     """Base class for Cohere models."""
@@ -121,9 +121,7 @@ class BaseCohere(Serializable):
         return values


-@deprecated(
-    since="0.1.14", removal="0.3.0", alternative_import="langchain_cohere.Cohere"
-)
+@deprecated(since="0.1.14", removal="1.0", alternative_import="langchain_cohere.Cohere")
 class Cohere(LLM, BaseCohere):
     """Cohere large language models.

@@ -29,7 +29,7 @@ def _stream_response_to_generation_chunk(

 @deprecated(
     since="0.0.26",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_fireworks.Fireworks",
 )
 class Fireworks(BaseLLM):
@@ -28,7 +28,7 @@ VALID_TASKS = (

 @deprecated(
     since="0.0.37",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_huggingface.HuggingFaceEndpoint",
 )
 class HuggingFaceEndpoint(LLM):
@@ -21,7 +21,7 @@ VALID_TASKS_DICT = {

 @deprecated(
     "0.0.21",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_huggingface.HuggingFaceEndpoint",
 )
 class HuggingFaceHub(LLM):
@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)

 @deprecated(
     since="0.0.37",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_huggingface.HuggingFacePipeline",
 )
 class HuggingFacePipeline(BaseLLM):
@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)

 @deprecated(
     "0.0.21",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_huggingface.HuggingFaceEndpoint",
 )
 class HuggingFaceTextGenInference(LLM):
@@ -730,9 +730,7 @@ class BaseOpenAI(BaseLLM):
         return self.max_context_size - num_tokens


-@deprecated(
-    since="0.0.10", removal="0.3.0", alternative_import="langchain_openai.OpenAI"
-)
+@deprecated(since="0.0.10", removal="1.0", alternative_import="langchain_openai.OpenAI")
 class OpenAI(BaseOpenAI):
     """OpenAI large language models.

@@ -760,7 +758,7 @@ class OpenAI(BaseOpenAI):


 @deprecated(
-    since="0.0.10", removal="0.3.0", alternative_import="langchain_openai.AzureOpenAI"
+    since="0.0.10", removal="1.0", alternative_import="langchain_openai.AzureOpenAI"
 )
 class AzureOpenAI(BaseOpenAI):
     """Azure-specific OpenAI large language models.
@@ -968,7 +966,7 @@ class AzureOpenAI(BaseOpenAI):

 @deprecated(
     since="0.0.1",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_openai.ChatOpenAI",
 )
 class OpenAIChat(BaseLLM):
@@ -19,7 +19,7 @@ logger = logging.getLogger(__name__)


 @deprecated(
-    since="0.0.12", removal="0.3", alternative_import="langchain_together.Together"
+    since="0.0.12", removal="1.0", alternative_import="langchain_together.Together"
 )
 class Together(LLM):
     """LLM models from `Together`.
@@ -203,7 +203,7 @@ class _VertexAICommon(_VertexAIBase):

 @deprecated(
     since="0.0.12",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_vertexai.VertexAI",
 )
 class VertexAI(_VertexAICommon, BaseLLM):
@@ -393,7 +393,7 @@ class VertexAI(_VertexAICommon, BaseLLM):

 @deprecated(
     since="0.0.12",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_vertexai.VertexAIModelGarden",
 )
 class VertexAIModelGarden(_VertexAIBase, BaseLLM):
@@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)


 @deprecated(
-    since="0.0.18", removal="0.3", alternative_import="langchain_ibm.WatsonxLLM"
+    since="0.0.18", removal="1.0", alternative_import="langchain_ibm.WatsonxLLM"
 )
 class WatsonxLLM(BaseLLM):
     """
@@ -43,7 +43,7 @@ def _get_docs(response: Any) -> List[Document]:

 @deprecated(
     since="0.0.30",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_cohere.CohereRagRetriever",
 )
 class CohereRagRetriever(BaseRetriever):
@@ -23,7 +23,7 @@ if TYPE_CHECKING:

 @deprecated(
     since="0.0.32",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.DocumentAIWarehouseRetriever",
 )
 class GoogleDocumentAIWarehouseRetriever(BaseRetriever):
@@ -198,7 +198,7 @@ class _BaseGoogleVertexAISearchRetriever(BaseModel):

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.VertexAISearchRetriever",
 )
 class GoogleVertexAISearchRetriever(BaseRetriever, _BaseGoogleVertexAISearchRetriever):
@@ -396,7 +396,7 @@ class GoogleVertexAISearchRetriever(BaseRetriever, _BaseGoogleVertexAISearchRetr

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.VertexAIMultiTurnSearchRetriever",
 )
 class GoogleVertexAIMultiTurnSearchRetriever(
@@ -99,7 +99,7 @@ class AstraDBBaseStore(Generic[V], BaseStore[str, V], ABC):

 @deprecated(
     since="0.0.22",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_astradb.AstraDBStore",
 )
 class AstraDBStore(AstraDBBaseStore[Any]):
@@ -167,7 +167,7 @@ class AstraDBStore(AstraDBBaseStore[Any]):

 @deprecated(
     since="0.0.22",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_astradb.AstraDBByteStore",
 )
 class AstraDBByteStore(AstraDBBaseStore[bytes], ByteStore):
@@ -39,7 +39,7 @@ def _encoding_file_extension_map(encoding: texttospeech.AudioEncoding) -> Option

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.TextToSpeechTool",
 )
 class GoogleCloudTextToSpeechTool(BaseTool):
@@ -18,7 +18,7 @@ class GooglePlacesSchema(BaseModel):

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GooglePlacesTool",
 )
 class GooglePlacesTool(BaseTool):
@@ -11,7 +11,7 @@ from langchain_community.utilities.google_search import GoogleSearchAPIWrapper

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GoogleSearchRun",
 )
 class GoogleSearchRun(BaseTool):
@@ -36,7 +36,7 @@ class GoogleSearchRun(BaseTool):

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GoogleSearchResults",
 )
 class GoogleSearchResults(BaseTool):
@@ -14,7 +14,7 @@ from langchain_community.utilities.metaphor_search import MetaphorSearchAPIWrapp

 @deprecated(
     since="0.0.15",
-    removal="0.3.0",
+    removal="1.0",
     alternative="langchain_exa.ExaSearchResults",
 )
 class MetaphorSearchResults(BaseTool):
@@ -10,7 +10,7 @@ from langchain_core.utils import get_from_dict_or_env

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GooglePlacesAPIWrapper",
 )
 class GooglePlacesAPIWrapper(BaseModel):
@@ -9,7 +9,7 @@ from langchain_core.utils import get_from_dict_or_env

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.GoogleSearchAPIWrapper",
 )
 class GoogleSearchAPIWrapper(BaseModel):
@@ -287,7 +287,7 @@ class SQLDatabase:
             return sorted(self._include_tables)
         return sorted(self._all_tables - self._ignore_tables)

-    @deprecated("0.0.1", alternative="get_usable_table_names", removal="0.3.0")
+    @deprecated("0.0.1", alternative="get_usable_table_names", removal="1.0")
     def get_table_names(self) -> Iterable[str]:
         """Get names of tables available."""
         return self.get_usable_table_names()
@@ -67,7 +67,7 @@ def _unique_list(lst: List[T], key: Callable[[T], U]) -> List[T]:

 @deprecated(
     since="0.0.21",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_astradb.AstraDBVectorStore",
 )
 class AstraDB(VectorStore):
@@ -38,7 +38,7 @@ _vector_table_lock = Lock() # process-wide BigQueryVectorSearch table lock

 @deprecated(
     since="0.0.33",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_community.BigQueryVectorSearch",
 )
 class BigQueryVectorSearch(VectorStore):
@@ -50,7 +50,7 @@ def _results_to_docs_and_scores(results: Any) -> List[Tuple[Document, float]]:
     ]


-@deprecated(since="0.2.9", removal="0.4", alternative_import="langchain_chroma.Chroma")
+@deprecated(since="0.2.9", removal="1.0", alternative_import="langchain_chroma.Chroma")
 class Chroma(VectorStore):
     """`ChromaDB` vector store.

@@ -705,7 +705,7 @@ class Chroma(VectorStore):
             "Since Chroma 0.4.x the manual persistence method is no longer "
             "supported as docs are automatically persisted."
         ),
-        removal="0.3.0",
+        removal="1.0",
     )
     def persist(self) -> None:
         """Persist the collection.
@@ -14,7 +14,7 @@ if TYPE_CHECKING:

 @deprecated(
     since="0.2.4",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_couchbase.CouchbaseVectorStore",
 )
 class CouchbaseVectorStore(VectorStore):
@@ -657,7 +657,7 @@ def _alias_filters(kwargs: Dict[str, Any]) -> Optional[Dict[str, Any]]:
     if "filters" in kwargs:
         warn_deprecated(
             since="0.2.11",
-            removal="0.3",
+            removal="1.0",
             message="DatabricksVectorSearch received a key `filters` in search_kwargs. "
             "`filters` was deprecated since langchain-community 0.2.11 and will "
             "be removed in 0.3. Please use `filter` instead.",
@@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)

 @deprecated(
     since="0.0.12",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_google_vertexai.VectorSearchVectorStore",
 )
 class MatchingEngine(VectorStore):
@@ -25,7 +25,7 @@ DEFAULT_MILVUS_CONNECTION = {

 @deprecated(
     since="0.2.0",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_milvus.MilvusVectorStore",
 )
 class Milvus(VectorStore):
@@ -35,7 +35,7 @@ DEFAULT_INSERT_BATCH_SIZE = 100

 @deprecated(
     since="0.0.25",
-    removal="0.3.0",
+    removal="1.0",
     alternative_import="langchain_mongodb.MongoDBAtlasVectorSearch",
 )
 class MongoDBAtlasVectorSearch(VectorStore):
@@ -43,7 +43,7 @@ def _is_pinecone_v3() -> bool:


 @deprecated(
-    since="0.0.18", removal="0.3.0", alternative_import="langchain_pinecone.Pinecone"
+    since="0.0.18", removal="1.0", alternative_import="langchain_pinecone.Pinecone"
 )
 class Pinecone(VectorStore):
     """`Pinecone` vector store.
@@ -66,9 +66,7 @@ def sync_call_fallback(method: Callable) -> Callable:
     return wrapper


-@deprecated(
-    since="0.0.37", removal="0.3.0", alternative_import="langchain_qdrant.Qdrant"
-)
+@deprecated(since="0.0.37", removal="1.0", alternative_import="langchain_qdrant.Qdrant")
 class Qdrant(VectorStore):
     """`Qdrant` vector store.

@@ -220,7 +220,7 @@ class BaseLanguageModel(
         # generate responses that match a given schema.
         raise NotImplementedError()

-    @deprecated("0.1.7", alternative="invoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="invoke", removal="1.0")
     @abstractmethod
     def predict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
@@ -241,7 +241,7 @@ class BaseLanguageModel(
             Top model prediction as a string.
         """

-    @deprecated("0.1.7", alternative="invoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="invoke", removal="1.0")
     @abstractmethod
     def predict_messages(
         self,
@@ -266,7 +266,7 @@ class BaseLanguageModel(
             Top model prediction as a message.
         """

-    @deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="1.0")
     @abstractmethod
     async def apredict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
@@ -287,7 +287,7 @@ class BaseLanguageModel(
             Top model prediction as a string.
         """

-    @deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="1.0")
     @abstractmethod
     async def apredict_messages(
         self,
@@ -209,7 +209,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
     """ # noqa: E501

     callback_manager: Optional[BaseCallbackManager] = deprecated(
-        name="callback_manager", since="0.1.7", removal="0.3.0", alternative="callbacks"
+        name="callback_manager", since="0.1.7", removal="1.0", alternative="callbacks"
     )(
         Field(
             default=None,
@@ -1019,7 +1019,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
                 break
             yield item # type: ignore[misc]

-    @deprecated("0.1.7", alternative="invoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="invoke", removal="1.0")
     def __call__(
         self,
         messages: List[BaseMessage],
@@ -1051,13 +1051,13 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
         else:
             raise ValueError("Unexpected generation type")

-    @deprecated("0.1.7", alternative="invoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="invoke", removal="1.0")
     def call_as_llm(
         self, message: str, stop: Optional[List[str]] = None, **kwargs: Any
     ) -> str:
         return self.predict(message, stop=stop, **kwargs)

-    @deprecated("0.1.7", alternative="invoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="invoke", removal="1.0")
     def predict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
@@ -1071,7 +1071,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
         else:
             raise ValueError("Cannot use predict when output is not a string.")

-    @deprecated("0.1.7", alternative="invoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="invoke", removal="1.0")
     def predict_messages(
         self,
         messages: List[BaseMessage],
@@ -1085,7 +1085,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
             _stop = list(stop)
         return self(messages, stop=_stop, **kwargs)

-    @deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="1.0")
     async def apredict(
         self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
     ) -> str:
@@ -1101,7 +1101,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
         else:
             raise ValueError("Cannot use predict when output is not a string.")

-    @deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
+    @deprecated("0.1.7", alternative="ainvoke", removal="1.0")
     async def apredict_messages(
         self,
         messages: List[BaseMessage],
|
@ -1150,7 +1150,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
|
|||||||
generations = [existing_prompts[i] for i in range(len(prompts))]
|
generations = [existing_prompts[i] for i in range(len(prompts))]
|
||||||
return LLMResult(generations=generations, llm_output=llm_output, run=run_info)
|
return LLMResult(generations=generations, llm_output=llm_output, run=run_info)
|
||||||
|
|
||||||
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
|
@deprecated("0.1.7", alternative="invoke", removal="1.0")
|
||||||
def __call__(
|
def __call__(
|
||||||
self,
|
self,
|
||||||
prompt: str,
|
prompt: str,
|
||||||
@ -1220,7 +1220,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
|
|||||||
)
|
)
|
||||||
return result.generations[0][0].text
|
return result.generations[0][0].text
|
||||||
|
|
||||||
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
|
@deprecated("0.1.7", alternative="invoke", removal="1.0")
|
||||||
def predict(
|
def predict(
|
||||||
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
|
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
|
||||||
) -> str:
|
) -> str:
|
||||||
@ -1230,7 +1230,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
|
|||||||
_stop = list(stop)
|
_stop = list(stop)
|
||||||
return self(text, stop=_stop, **kwargs)
|
return self(text, stop=_stop, **kwargs)
|
||||||
|
|
||||||
@deprecated("0.1.7", alternative="invoke", removal="0.3.0")
|
@deprecated("0.1.7", alternative="invoke", removal="1.0")
|
||||||
def predict_messages(
|
def predict_messages(
|
||||||
self,
|
self,
|
||||||
messages: List[BaseMessage],
|
messages: List[BaseMessage],
|
||||||
@ -1246,7 +1246,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
|
|||||||
content = self(text, stop=_stop, **kwargs)
|
content = self(text, stop=_stop, **kwargs)
|
||||||
return AIMessage(content=content)
|
return AIMessage(content=content)
|
||||||
|
|
||||||
@deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
|
@deprecated("0.1.7", alternative="ainvoke", removal="1.0")
|
||||||
async def apredict(
|
async def apredict(
|
||||||
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
|
self, text: str, *, stop: Optional[Sequence[str]] = None, **kwargs: Any
|
||||||
) -> str:
|
) -> str:
|
||||||
@ -1256,7 +1256,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
|
|||||||
_stop = list(stop)
|
_stop = list(stop)
|
||||||
return await self._call_async(text, stop=_stop, **kwargs)
|
return await self._call_async(text, stop=_stop, **kwargs)
|
||||||
|
|
||||||
@deprecated("0.1.7", alternative="ainvoke", removal="0.3.0")
|
@deprecated("0.1.7", alternative="ainvoke", removal="1.0")
|
||||||
async def apredict_messages(
|
async def apredict_messages(
|
||||||
self,
|
self,
|
||||||
messages: List[BaseMessage],
|
messages: List[BaseMessage],
|
||||||
|
@ -349,7 +349,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
|
|||||||
run_manager=run_manager.get_sync(),
|
run_manager=run_manager.get_sync(),
|
||||||
)
|
)
|
||||||
|
|
||||||
@deprecated(since="0.1.46", alternative="invoke", removal="0.3.0")
|
@deprecated(since="0.1.46", alternative="invoke", removal="1.0")
|
||||||
def get_relevant_documents(
|
def get_relevant_documents(
|
||||||
self,
|
self,
|
||||||
query: str,
|
query: str,
|
||||||
@ -393,7 +393,7 @@ class BaseRetriever(RunnableSerializable[RetrieverInput, RetrieverOutput], ABC):
|
|||||||
config["run_name"] = run_name
|
config["run_name"] = run_name
|
||||||
return self.invoke(query, config, **kwargs)
|
return self.invoke(query, config, **kwargs)
|
||||||
|
|
||||||
@deprecated(since="0.1.46", alternative="ainvoke", removal="0.3.0")
|
@deprecated(since="0.1.46", alternative="ainvoke", removal="1.0")
|
||||||
async def aget_relevant_documents(
|
async def aget_relevant_documents(
|
||||||
self,
|
self,
|
||||||
query: str,
|
query: str,
|
||||||
|
@ -332,7 +332,7 @@ class ChildTool(BaseTool):
|
|||||||
"""Callbacks to be called during tool execution."""
|
"""Callbacks to be called during tool execution."""
|
||||||
|
|
||||||
callback_manager: Optional[BaseCallbackManager] = deprecated(
|
callback_manager: Optional[BaseCallbackManager] = deprecated(
|
||||||
name="callback_manager", since="0.1.7", removal="0.3.0", alternative="callbacks"
|
name="callback_manager", since="0.1.7", removal="1.0", alternative="callbacks"
|
||||||
)(
|
)(
|
||||||
Field(
|
Field(
|
||||||
default=None,
|
default=None,
|
||||||
@ -740,7 +740,7 @@ class ChildTool(BaseTool):
|
|||||||
await run_manager.on_tool_end(output, color=color, name=self.name, **kwargs)
|
await run_manager.on_tool_end(output, color=color, name=self.name, **kwargs)
|
||||||
return output
|
return output
|
||||||
|
|
||||||
@deprecated("0.1.47", alternative="invoke", removal="0.3.0")
|
@deprecated("0.1.47", alternative="invoke", removal="1.0")
|
||||||
def __call__(self, tool_input: str, callbacks: Callbacks = None) -> str:
|
def __call__(self, tool_input: str, callbacks: Callbacks = None) -> str:
|
||||||
"""Make tool callable."""
|
"""Make tool callable."""
|
||||||
return self.run(tool_input, callbacks=callbacks)
|
return self.run(tool_input, callbacks=callbacks)
|
||||||
|
@ -15,7 +15,7 @@ from langchain_core.outputs import LLMResult
|
|||||||
from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
|
from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="Use string instead.", removal="0.3.0")
|
@deprecated("0.1.0", alternative="Use string instead.", removal="1.0")
|
||||||
def RunTypeEnum() -> Type[RunTypeEnumDep]:
|
def RunTypeEnum() -> Type[RunTypeEnumDep]:
|
||||||
"""RunTypeEnum."""
|
"""RunTypeEnum."""
|
||||||
warnings.warn(
|
warnings.warn(
|
||||||
@ -26,7 +26,7 @@ def RunTypeEnum() -> Type[RunTypeEnumDep]:
|
|||||||
return RunTypeEnumDep
|
return RunTypeEnumDep
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class TracerSessionV1Base(BaseModel):
|
class TracerSessionV1Base(BaseModel):
|
||||||
"""Base class for TracerSessionV1."""
|
"""Base class for TracerSessionV1."""
|
||||||
|
|
||||||
@ -35,33 +35,33 @@ class TracerSessionV1Base(BaseModel):
|
|||||||
extra: Optional[Dict[str, Any]] = None
|
extra: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class TracerSessionV1Create(TracerSessionV1Base):
|
class TracerSessionV1Create(TracerSessionV1Base):
|
||||||
"""Create class for TracerSessionV1."""
|
"""Create class for TracerSessionV1."""
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class TracerSessionV1(TracerSessionV1Base):
|
class TracerSessionV1(TracerSessionV1Base):
|
||||||
"""TracerSessionV1 schema."""
|
"""TracerSessionV1 schema."""
|
||||||
|
|
||||||
id: int
|
id: int
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class TracerSessionBase(TracerSessionV1Base):
|
class TracerSessionBase(TracerSessionV1Base):
|
||||||
"""Base class for TracerSession."""
|
"""Base class for TracerSession."""
|
||||||
|
|
||||||
tenant_id: UUID
|
tenant_id: UUID
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class TracerSession(TracerSessionBase):
|
class TracerSession(TracerSessionBase):
|
||||||
"""TracerSessionV1 schema for the V2 API."""
|
"""TracerSessionV1 schema for the V2 API."""
|
||||||
|
|
||||||
id: UUID
|
id: UUID
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="Run", removal="0.3.0")
|
@deprecated("0.1.0", alternative="Run", removal="1.0")
|
||||||
class BaseRun(BaseModel):
|
class BaseRun(BaseModel):
|
||||||
"""Base class for Run."""
|
"""Base class for Run."""
|
||||||
|
|
||||||
@@ -77,7 +77,7 @@ class BaseRun(BaseModel):
     error: Optional[str] = None


-@deprecated("0.1.0", alternative="Run", removal="0.3.0")
+@deprecated("0.1.0", alternative="Run", removal="1.0")
 class LLMRun(BaseRun):
     """Class for LLMRun."""

@@ -85,7 +85,7 @@ class LLMRun(BaseRun):
     response: Optional[LLMResult] = None


-@deprecated("0.1.0", alternative="Run", removal="0.3.0")
+@deprecated("0.1.0", alternative="Run", removal="1.0")
 class ChainRun(BaseRun):
     """Class for ChainRun."""

@@ -96,7 +96,7 @@ class ChainRun(BaseRun):
     child_tool_runs: List[ToolRun] = Field(default_factory=list)


-@deprecated("0.1.0", alternative="Run", removal="0.3.0")
+@deprecated("0.1.0", alternative="Run", removal="1.0")
 class ToolRun(BaseRun):
     """Class for ToolRun."""

@@ -81,7 +81,7 @@ def _rm_titles(kv: dict, prev_key: str = "") -> dict:
 @deprecated(
     "0.1.16",
     alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
-    removal="0.3.0",
+    removal="1.0",
 )
 def convert_pydantic_to_openai_function(
     model: Type[BaseModel],
@@ -121,7 +121,7 @@ def convert_pydantic_to_openai_function(
 @deprecated(
     "0.1.16",
     alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",
-    removal="0.3.0",
+    removal="1.0",
 )
 def convert_pydantic_to_openai_tool(
     model: Type[BaseModel],
@@ -155,7 +155,7 @@ def _get_python_function_name(function: Callable) -> str:
 @deprecated(
     "0.1.16",
     alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
-    removal="0.3.0",
+    removal="1.0",
 )
 def convert_python_function_to_openai_function(
     function: Callable,
@@ -268,7 +268,7 @@ def _convert_any_typed_dicts_to_pydantic(
 @deprecated(
     "0.1.16",
     alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
-    removal="0.3.0",
+    removal="1.0",
 )
 def format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
     """Format tool into the OpenAI function API.
@@ -305,7 +305,7 @@ def format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
 @deprecated(
     "0.1.16",
     alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",
-    removal="0.3.0",
+    removal="1.0",
 )
 def format_tool_to_openai_tool(tool: BaseTool) -> ToolDescription:
     """Format tool into the OpenAI function API.
@@ -8,7 +8,7 @@ from langchain_core._api.deprecation import deprecated

 @deprecated(
     since="0.1.30",
-    removal="0.3",
+    removal="1.0",
     message=(
         "Using the hwchase17/langchain-hub "
         "repo for prompts is deprecated. Please use "
@@ -154,7 +154,7 @@ class InMemoryVectorStore(VectorStore):
             "It'll be removed in 0.3.0."
         ),
         since="0.2.29",
-        removal="0.3.0",
+        removal="1.0",
     )
     def upsert(self, items: Sequence[Document], /, **kwargs: Any) -> UpsertResponse:
         vectors = self.embedding.embed_documents([item.page_content for item in items])
@@ -180,7 +180,7 @@ class InMemoryVectorStore(VectorStore):
             "It'll be removed in 0.3.0."
         ),
         since="0.2.29",
-        removal="0.3.0",
+        removal="1.0",
     )
     async def aupsert(
         self, items: Sequence[Document], /, **kwargs: Any
@@ -126,7 +126,7 @@ def _destrip(tool_input: Any) -> Any:

 @deprecated(
     since="0.0.54",
-    removal="0.3",
+    removal="1.0",
     alternative_import="langchain_anthropic.experimental.ChatAnthropicTools",
 )
 class AnthropicFunctions(BaseChatModel):
@@ -134,7 +134,7 @@ def parse_response(message: BaseMessage) -> str:


 @deprecated( # type: ignore[arg-type]
-    since="0.0.64", removal="0.4.0", alternative_import="langchain_ollama.ChatOllama"
+    since="0.0.64", removal="1.0", alternative_import="langchain_ollama.ChatOllama"
 )
 class OllamaFunctions(ChatOllama):
     """Function chat model that uses Ollama API."""
@@ -92,7 +92,7 @@ def create_importer(
             warn_deprecated(
                 since="0.1",
                 pending=False,
-                removal="0.4",
+                removal="1.0",
                 message=(
                     f"Importing {name} from {package} is deprecated. "
|
||||||
f"Please replace deprecated imports:\n\n"
|
f"Please replace deprecated imports:\n\n"
|
||||||
@ -124,7 +124,7 @@ def create_importer(
|
|||||||
warn_deprecated(
|
warn_deprecated(
|
||||||
since="0.1",
|
since="0.1",
|
||||||
pending=False,
|
pending=False,
|
||||||
removal="0.4",
|
removal="1.0",
|
||||||
message=(
|
message=(
|
||||||
f"Importing {name} from {package} is deprecated. "
|
f"Importing {name} from {package} is deprecated. "
|
||||||
f"Please replace deprecated imports:\n\n"
|
f"Please replace deprecated imports:\n\n"
|
||||||
|
@ -633,7 +633,7 @@ class RunnableMultiActionAgent(BaseMultiActionAgent):
|
|||||||
"Use new agent constructor methods like create_react_agent, create_json_agent, "
|
"Use new agent constructor methods like create_react_agent, create_json_agent, "
|
||||||
"create_structured_chat_agent, etc."
|
"create_structured_chat_agent, etc."
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
)
|
)
|
||||||
class LLMSingleActionAgent(BaseSingleActionAgent):
|
class LLMSingleActionAgent(BaseSingleActionAgent):
|
||||||
"""Base class for single action agents."""
|
"""Base class for single action agents."""
|
||||||
@ -724,7 +724,7 @@ class LLMSingleActionAgent(BaseSingleActionAgent):
|
|||||||
"Use new agent constructor methods like create_react_agent, create_json_agent, "
|
"Use new agent constructor methods like create_react_agent, create_json_agent, "
|
||||||
"create_structured_chat_agent, etc."
|
"create_structured_chat_agent, etc."
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
)
|
)
|
||||||
class Agent(BaseSingleActionAgent):
|
class Agent(BaseSingleActionAgent):
|
||||||
"""Agent that calls the language model and deciding the action.
|
"""Agent that calls the language model and deciding the action.
|
||||||
|
@ -11,7 +11,7 @@ from langchain_core._api import deprecated
|
|||||||
"Use new agent constructor methods like create_react_agent, create_json_agent, "
|
"Use new agent constructor methods like create_react_agent, create_json_agent, "
|
||||||
"create_structured_chat_agent, etc."
|
"create_structured_chat_agent, etc."
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
)
|
)
|
||||||
class AgentType(str, Enum):
|
class AgentType(str, Enum):
|
||||||
"""An enum for agent types.
|
"""An enum for agent types.
|
||||||
|
@ -25,7 +25,7 @@ from langchain.agents.utils import validate_tools_single_input
|
|||||||
from langchain.chains.llm import LLMChain
|
from langchain.chains.llm import LLMChain
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_react_agent", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_react_agent", removal="1.0")
|
||||||
class ChatAgent(Agent):
|
class ChatAgent(Agent):
|
||||||
"""Chat Agent."""
|
"""Chat Agent."""
|
||||||
|
|
||||||
|
@ -19,7 +19,7 @@ from langchain.agents.utils import validate_tools_single_input
|
|||||||
from langchain.chains import LLMChain
|
from langchain.chains import LLMChain
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_react_agent", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_react_agent", removal="1.0")
|
||||||
class ConversationalAgent(Agent):
|
class ConversationalAgent(Agent):
|
||||||
"""An agent that holds a conversation in addition to using tools."""
|
"""An agent that holds a conversation in addition to using tools."""
|
||||||
|
|
||||||
|
@ -31,7 +31,7 @@ from langchain.agents.utils import validate_tools_single_input
|
|||||||
from langchain.chains import LLMChain
|
from langchain.chains import LLMChain
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_json_chat_agent", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_json_chat_agent", removal="1.0")
|
||||||
class ConversationalChatAgent(Agent):
|
class ConversationalChatAgent(Agent):
|
||||||
"""An agent designed to hold a conversation in addition to using tools."""
|
"""An agent designed to hold a conversation in addition to using tools."""
|
||||||
|
|
||||||
|
@ -18,7 +18,7 @@ from langchain.agents.loading import AGENT_TO_CLASS, load_agent
|
|||||||
"Use new agent constructor methods like create_react_agent, create_json_agent, "
|
"Use new agent constructor methods like create_react_agent, create_json_agent, "
|
||||||
"create_structured_chat_agent, etc."
|
"create_structured_chat_agent, etc."
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
)
|
)
|
||||||
def initialize_agent(
|
def initialize_agent(
|
||||||
tools: Sequence[BaseTool],
|
tools: Sequence[BaseTool],
|
||||||
|
@ -31,7 +31,7 @@ def _load_agent_from_tools(
|
|||||||
return agent_cls.from_llm_and_tools(llm, tools, **combined_config)
|
return agent_cls.from_llm_and_tools(llm, tools, **combined_config)
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
def load_agent_from_config(
|
def load_agent_from_config(
|
||||||
config: dict,
|
config: dict,
|
||||||
llm: Optional[BaseLanguageModel] = None,
|
llm: Optional[BaseLanguageModel] = None,
|
||||||
@ -90,7 +90,7 @@ def load_agent_from_config(
|
|||||||
return agent_cls(**combined_config) # type: ignore
|
return agent_cls(**combined_config) # type: ignore
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
def load_agent(
|
def load_agent(
|
||||||
path: Union[str, Path], **kwargs: Any
|
path: Union[str, Path], **kwargs: Any
|
||||||
) -> Union[BaseSingleActionAgent, BaseMultiActionAgent]:
|
) -> Union[BaseSingleActionAgent, BaseMultiActionAgent]:
|
||||||
|
@ -34,7 +34,7 @@ class ChainConfig(NamedTuple):
|
|||||||
action_description: str
|
action_description: str
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_react_agent", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_react_agent", removal="1.0")
|
||||||
class ZeroShotAgent(Agent):
|
class ZeroShotAgent(Agent):
|
||||||
"""Agent for the MRKL chain.
|
"""Agent for the MRKL chain.
|
||||||
|
|
||||||
@ -168,7 +168,7 @@ class ZeroShotAgent(Agent):
|
|||||||
super()._validate_tools(tools)
|
super()._validate_tools(tools)
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class MRKLChain(AgentExecutor):
|
class MRKLChain(AgentExecutor):
|
||||||
"""Chain that implements the MRKL system."""
|
"""Chain that implements the MRKL system."""
|
||||||
|
|
||||||
|
@ -31,7 +31,7 @@ from langchain.agents.output_parsers.openai_functions import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_openai_functions_agent", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_openai_functions_agent", removal="1.0")
|
||||||
class OpenAIFunctionsAgent(BaseSingleActionAgent):
|
class OpenAIFunctionsAgent(BaseSingleActionAgent):
|
||||||
"""An Agent driven by OpenAIs function powered API.
|
"""An Agent driven by OpenAIs function powered API.
|
||||||
|
|
||||||
|
@ -94,7 +94,7 @@ def _parse_ai_message(message: BaseMessage) -> Union[List[AgentAction], AgentFin
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_openai_tools_agent", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_openai_tools_agent", removal="1.0")
|
||||||
class OpenAIMultiFunctionsAgent(BaseMultiActionAgent):
|
class OpenAIMultiFunctionsAgent(BaseMultiActionAgent):
|
||||||
"""Agent driven by OpenAIs function powered API.
|
"""Agent driven by OpenAIs function powered API.
|
||||||
|
|
||||||
|
@ -22,7 +22,7 @@ if TYPE_CHECKING:
|
|||||||
from langchain_community.docstore.base import Docstore
|
from langchain_community.docstore.base import Docstore
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class ReActDocstoreAgent(Agent):
|
class ReActDocstoreAgent(Agent):
|
||||||
"""Agent for the ReAct chain."""
|
"""Agent for the ReAct chain."""
|
||||||
|
|
||||||
@ -69,7 +69,7 @@ class ReActDocstoreAgent(Agent):
|
|||||||
return "Thought:"
|
return "Thought:"
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class DocstoreExplorer:
|
class DocstoreExplorer:
|
||||||
"""Class to assist with exploration of a document store."""
|
"""Class to assist with exploration of a document store."""
|
||||||
|
|
||||||
@ -119,7 +119,7 @@ class DocstoreExplorer:
|
|||||||
return self.document.page_content.split("\n\n")
|
return self.document.page_content.split("\n\n")
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class ReActTextWorldAgent(ReActDocstoreAgent):
|
class ReActTextWorldAgent(ReActDocstoreAgent):
|
||||||
"""Agent for the ReAct TextWorld chain."""
|
"""Agent for the ReAct TextWorld chain."""
|
||||||
|
|
||||||
@ -139,7 +139,7 @@ class ReActTextWorldAgent(ReActDocstoreAgent):
|
|||||||
raise ValueError(f"Tool name should be Play, got {tool_names}")
|
raise ValueError(f"Tool name should be Play, got {tool_names}")
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class ReActChain(AgentExecutor):
|
class ReActChain(AgentExecutor):
|
||||||
"""[Deprecated] Chain that implements the ReAct paper."""
|
"""[Deprecated] Chain that implements the ReAct paper."""
|
||||||
|
|
||||||
|
@ -24,7 +24,7 @@ if TYPE_CHECKING:
|
|||||||
from langchain_community.utilities.serpapi import SerpAPIWrapper
|
from langchain_community.utilities.serpapi import SerpAPIWrapper
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_self_ask_with_search", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_self_ask_with_search", removal="1.0")
|
||||||
class SelfAskWithSearchAgent(Agent):
|
class SelfAskWithSearchAgent(Agent):
|
||||||
"""Agent for the self-ask-with-search paper."""
|
"""Agent for the self-ask-with-search paper."""
|
||||||
|
|
||||||
@ -67,7 +67,7 @@ class SelfAskWithSearchAgent(Agent):
|
|||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", removal="0.3.0")
|
@deprecated("0.1.0", removal="1.0")
|
||||||
class SelfAskWithSearchChain(AgentExecutor):
|
class SelfAskWithSearchChain(AgentExecutor):
|
||||||
"""[Deprecated] Chain that does self-ask with search."""
|
"""[Deprecated] Chain that does self-ask with search."""
|
||||||
|
|
||||||
|
@ -28,7 +28,7 @@ from langchain.tools.render import ToolsRenderer, render_text_description_and_ar
|
|||||||
HUMAN_MESSAGE_TEMPLATE = "{input}\n\n{agent_scratchpad}"
|
HUMAN_MESSAGE_TEMPLATE = "{input}\n\n{agent_scratchpad}"
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_structured_chat_agent", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_structured_chat_agent", removal="1.0")
|
||||||
class StructuredChatAgent(Agent):
|
class StructuredChatAgent(Agent):
|
||||||
"""Structured Chat Agent."""
|
"""Structured Chat Agent."""
|
||||||
|
|
||||||
|
@ -17,7 +17,7 @@ from langchain.chains.llm import LLMChain
|
|||||||
from langchain.tools.render import ToolsRenderer, render_text_description
|
from langchain.tools.render import ToolsRenderer, render_text_description
|
||||||
|
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="create_xml_agent", removal="0.3.0")
|
@deprecated("0.1.0", alternative="create_xml_agent", removal="1.0")
|
||||||
class XMLAgent(BaseSingleActionAgent):
|
class XMLAgent(BaseSingleActionAgent):
|
||||||
"""Agent that uses XML tags.
|
"""Agent that uses XML tags.
|
||||||
|
|
||||||
|
@ -334,7 +334,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
|
|||||||
None, self._call, inputs, run_manager.get_sync() if run_manager else None
|
None, self._call, inputs, run_manager.get_sync() if run_manager else None
|
||||||
)
|
)
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="invoke", removal="0.3.0")
|
@deprecated("0.1.0", alternative="invoke", removal="1.0")
|
||||||
def __call__(
|
def __call__(
|
||||||
self,
|
self,
|
||||||
inputs: Union[Dict[str, Any], Any],
|
inputs: Union[Dict[str, Any], Any],
|
||||||
@ -385,7 +385,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
|
|||||||
include_run_info=include_run_info,
|
include_run_info=include_run_info,
|
||||||
)
|
)
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="ainvoke", removal="0.3.0")
|
@deprecated("0.1.0", alternative="ainvoke", removal="1.0")
|
||||||
async def acall(
|
async def acall(
|
||||||
self,
|
self,
|
||||||
inputs: Union[Dict[str, Any], Any],
|
inputs: Union[Dict[str, Any], Any],
|
||||||
@ -544,7 +544,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
|
|||||||
)
|
)
|
||||||
return self.output_keys[0]
|
return self.output_keys[0]
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="invoke", removal="0.3.0")
|
@deprecated("0.1.0", alternative="invoke", removal="1.0")
|
||||||
def run(
|
def run(
|
||||||
self,
|
self,
|
||||||
*args: Any,
|
*args: Any,
|
||||||
@ -615,7 +615,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
|
|||||||
f" but not both. Got args: {args} and kwargs: {kwargs}."
|
f" but not both. Got args: {args} and kwargs: {kwargs}."
|
||||||
)
|
)
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="ainvoke", removal="0.3.0")
|
@deprecated("0.1.0", alternative="ainvoke", removal="1.0")
|
||||||
async def arun(
|
async def arun(
|
||||||
self,
|
self,
|
||||||
*args: Any,
|
*args: Any,
|
||||||
@ -753,7 +753,7 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
|
|||||||
else:
|
else:
|
||||||
raise ValueError(f"{save_path} must be json or yaml")
|
raise ValueError(f"{save_path} must be json or yaml")
|
||||||
|
|
||||||
@deprecated("0.1.0", alternative="batch", removal="0.3.0")
|
@deprecated("0.1.0", alternative="batch", removal="1.0")
|
||||||
def apply(
|
def apply(
|
||||||
self, input_list: List[Dict[str, Any]], callbacks: Callbacks = None
|
self, input_list: List[Dict[str, Any]], callbacks: Callbacks = None
|
||||||
) -> List[Dict[str, str]]:
|
) -> List[Dict[str, str]]:
|
||||||
|
@ -242,7 +242,7 @@ class BaseConversationalRetrievalChain(Chain):
|
|||||||
"create_history_aware_retriever together with create_retrieval_chain "
|
"create_history_aware_retriever together with create_retrieval_chain "
|
||||||
"(see example in docstring)"
|
"(see example in docstring)"
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
)
|
)
|
||||||
class ConversationalRetrievalChain(BaseConversationalRetrievalChain):
|
class ConversationalRetrievalChain(BaseConversationalRetrievalChain):
|
||||||
"""Chain for having a conversation based on retrieved documents.
|
"""Chain for having a conversation based on retrieved documents.
|
||||||
|
@ -43,7 +43,7 @@ __all__ = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@deprecated(since="0.1.1", removal="0.3.0", alternative="create_openai_fn_runnable")
|
@deprecated(since="0.1.1", removal="1.0", alternative="create_openai_fn_runnable")
|
||||||
def create_openai_fn_chain(
|
def create_openai_fn_chain(
|
||||||
functions: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable]],
|
functions: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable]],
|
||||||
llm: BaseLanguageModel,
|
llm: BaseLanguageModel,
|
||||||
@ -145,7 +145,7 @@ def create_openai_fn_chain(
|
|||||||
|
|
||||||
|
|
||||||
@deprecated(
|
@deprecated(
|
||||||
since="0.1.1", removal="0.3.0", alternative="ChatOpenAI.with_structured_output"
|
since="0.1.1", removal="1.0", alternative="ChatOpenAI.with_structured_output"
|
||||||
)
|
)
|
||||||
def create_structured_output_chain(
|
def create_structured_output_chain(
|
||||||
output_schema: Union[Dict[str, Any], Type[BaseModel]],
|
output_schema: Union[Dict[str, Any], Type[BaseModel]],
|
||||||
|
@ -58,7 +58,7 @@ Passage:
|
|||||||
"feedback here:"
|
"feedback here:"
|
||||||
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
alternative=(
|
alternative=(
|
||||||
"""
|
"""
|
||||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||||
@ -128,7 +128,7 @@ def create_extraction_chain(
|
|||||||
"feedback here:"
|
"feedback here:"
|
||||||
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
alternative=(
|
alternative=(
|
||||||
"""
|
"""
|
||||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||||
|
@ -29,7 +29,7 @@ If a property is not present and is not required in the function parameters, do
|
|||||||
"feedback here:"
|
"feedback here:"
|
||||||
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
alternative=(
|
alternative=(
|
||||||
"""
|
"""
|
||||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||||
|
@ -41,7 +41,7 @@ from langchain_core.utils.pydantic import is_basemodel_subclass
|
|||||||
"feedback here: "
|
"feedback here: "
|
||||||
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
alternative=(
|
alternative=(
|
||||||
"""
|
"""
|
||||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||||
@ -159,7 +159,7 @@ def create_openai_fn_runnable(
|
|||||||
"feedback here: "
|
"feedback here: "
|
||||||
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
"<https://github.com/langchain-ai/langchain/discussions/18154>"
|
||||||
),
|
),
|
||||||
removal="0.3.0",
|
removal="1.0",
|
||||||
alternative=(
|
alternative=(
|
||||||
"""
|
"""
|
||||||
from langchain_core.pydantic_v1 import BaseModel, Field
|
from langchain_core.pydantic_v1 import BaseModel, Field
|
||||||
|
@ -13,7 +13,7 @@ from langchain.retrievers.document_compressors.base import BaseDocumentCompresso
|
|||||||
|
|
||||||
|
|
||||||
@deprecated(
|
@deprecated(
|
||||||
since="0.0.30", removal="0.3.0", alternative_import="langchain_cohere.CohereRerank"
|
since="0.0.30", removal="1.0", alternative_import="langchain_cohere.CohereRerank"
|
||||||
)
|
)
|
||||||
class CohereRerank(BaseDocumentCompressor):
|
class CohereRerank(BaseDocumentCompressor):
|
||||||
"""Document compressor that uses `Cohere Rerank API`."""
|
"""Document compressor that uses `Cohere Rerank API`."""
|
||||||
|
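For context, every decorator touched by this change comes from langchain_core._api, and only the removal target passed to it is updated. Below is a minimal sketch of how such a deprecation is declared and what a caller then sees; the run/invoke function names and the version numbers are illustrative only, not taken from this commit:

    from langchain_core._api import deprecated


    def invoke(text: str) -> str:
        """Preferred replacement for the deprecated entry point."""
        return text.upper()


    @deprecated(since="0.2.2", removal="1.0", alternative="invoke")
    def run(text: str) -> str:
        """Old entry point kept only for backwards compatibility."""
        return invoke(text)


    # Calling the decorated function emits a deprecation warning whose message
    # includes the removal target ("1.0") and the suggested alternative ("invoke"),
    # which is why changing removal= across the codebase updates every warning.
    print(run("hello"))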