core[minor],community[minor]: Upgrade all @root_validator() to @pre_init (#23841)

This PR introduces a @pre_init decorator that behaves like @root_validator(pre=True) but with all field defaults populated before the validator body runs.
Eugene Yurtsev 2024-07-08 16:09:29 -04:00 committed by GitHub
parent f152d6ed3d
commit 2c180d645e
114 changed files with 439 additions and 276 deletions
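The change in every file below is mechanical: the validator body stays the same, only the decorator and the import change. As a rough sketch of the idea (not the actual langchain_core.utils implementation, which is not shown in this diff), a pre_init-style decorator can be built on top of a pre=True root validator that first fills in field defaults, so validator bodies can keep indexing values[...] directly. The ExampleLLM class and the helper below are hypothetical names for illustration, assuming pydantic v1 semantics:

```python
# Hedged sketch of the pre_init idea, written against the pydantic v1 API
# (or the pydantic.v1 compatibility shim shipped with pydantic v2).
# This is NOT the langchain_core.utils.pre_init source, just the described behavior.
from typing import Any, Callable, Dict

try:  # pydantic v2 installs expose the v1 API under pydantic.v1
    from pydantic.v1 import BaseModel, root_validator
except ImportError:  # plain pydantic v1
    from pydantic import BaseModel, root_validator


def pre_init(func: Callable[[Any, Dict[str, Any]], Dict[str, Any]]) -> Any:
    """Run `func` before field validation, with field defaults filled in."""

    @root_validator(pre=True, allow_reuse=True)
    def wrapper(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # Unlike a bare pre=True root validator, populate defaults for any
        # field the caller did not pass, so the body can rely on keys like
        # values["n"] being present.
        for name, field in cls.__fields__.items():
            if name not in values and not field.required:
                values[name] = field.get_default()
        return func(cls, values)

    return wrapper


class ExampleLLM(BaseModel):  # hypothetical model, mirrors the pattern below
    n: int = 1
    api_key: str = ""

    # Before this PR:  @root_validator()
    # After this PR:   @pre_init
    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        if values["n"] < 1:  # the default for `n` is already in `values`
            raise ValueError("n must be at least 1.")
        return values


print(ExampleLLM())  # n=1 api_key=''
```

Each hunk below then makes the corresponding change: drop root_validator from the langchain_core.pydantic_v1 import where it is no longer used, import pre_init from langchain_core.utils, and swap the decorator on validate_environment.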

@@ -9,8 +9,8 @@ from typing import Any, Callable, Dict, List, Union
 from langchain_core._api.deprecation import deprecated
 from langchain_core.outputs import ChatResult
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_core.utils import get_from_dict_or_env, pre_init
 from langchain_community.chat_models.openai import ChatOpenAI
 from langchain_community.utils.openai import is_openai_v1
@@ -106,7 +106,7 @@ class AzureChatOpenAI(ChatOpenAI):
         """Get the namespace of the langchain object."""
         return ["langchain", "chat_models", "azure_openai"]
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         if values["n"] < 1:

@@ -50,11 +50,10 @@ from langchain_core.pydantic_v1 import (
     Extra,
     Field,
     SecretStr,
-    root_validator,
 )
 from langchain_core.runnables import Runnable, RunnableMap, RunnablePassthrough
 from langchain_core.tools import BaseTool
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from langchain_community.utilities.requests import Requests
@@ -300,7 +299,7 @@ class ChatEdenAI(BaseChatModel):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         values["edenai_api_key"] = convert_to_secret_str(

@@ -21,8 +21,8 @@ from langchain_core.outputs import (
     ChatGeneration,
     ChatResult,
 )
-from langchain_core.pydantic_v1 import BaseModel, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -261,7 +261,7 @@ class ChatGooglePalm(BaseChatModel, BaseModel):
         """Get the namespace of the langchain object."""
         return ["langchain", "chat_models", "google_palm"]
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key, python package exists, temperature, top_p, and top_k."""
         google_api_key = convert_to_secret_str(

@@ -29,6 +29,7 @@ from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 logger = logging.getLogger(__name__)
@@ -190,7 +191,7 @@ class ChatHunyuan(BaseChatModel):
         values["model_kwargs"] = extra
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         values["hunyuan_api_base"] = get_from_dict_or_env(
             values,

@@ -45,6 +45,7 @@ from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from tenacity import (
     before_sleep_log,
@@ -218,7 +219,7 @@ class JinaChat(BaseChatModel):
         values["model_kwargs"] = extra
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["jinachat_api_key"] = convert_to_secret_str(

@@ -11,6 +11,8 @@ from importlib.metadata import version
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast
+from langchain_core.utils import pre_init
 if TYPE_CHECKING:
     import gpudb
@@ -24,7 +26,7 @@ from langchain_core.messages import (
 )
 from langchain_core.output_parsers.transform import BaseOutputParser
 from langchain_core.outputs import ChatGeneration, ChatResult, Generation
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field
 LOG = logging.getLogger(__name__)
@@ -341,7 +343,7 @@ class ChatKinetica(BaseChatModel):
     kdbc: Any = Field(exclude=True)
     """ Kinetica DB connection. """
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Pydantic object validator."""

@@ -23,8 +23,8 @@ from langchain_core.callbacks import (
 )
 from langchain_core.messages import AIMessageChunk, BaseMessage
 from langchain_core.outputs import ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.adapters.openai import (
     convert_message_to_dict,
@@ -85,7 +85,7 @@ class ChatKonko(ChatOpenAI):
     max_tokens: int = 20
     """Maximum number of tokens to generate."""
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["konko_api_key"] = convert_to_secret_str(

@@ -48,10 +48,10 @@ from langchain_core.outputs import (
     ChatGenerationChunk,
     ChatResult,
 )
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 from langchain_core.utils.function_calling import convert_to_openai_tool
 logger = logging.getLogger(__name__)
@@ -249,7 +249,7 @@ class ChatLiteLLM(BaseChatModel):
         return _completion_with_retry(**kwargs)
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key, python package exists, temperature, top_p, and top_k."""
         try:

@@ -2,10 +2,10 @@
 from typing import Dict
-from langchain_core.pydantic_v1 import root_validator
 from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
+    pre_init,
 )
 from langchain_community.chat_models import ChatOpenAI
@@ -29,7 +29,7 @@ class MoonshotChat(MoonshotCommon, ChatOpenAI): # type: ignore[misc]
             moonshot = MoonshotChat(model="moonshot-v1-8k")
     """
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that the environment is set up correctly."""
         values["moonshot_api_key"] = convert_to_secret_str(

@@ -2,8 +2,8 @@
 from typing import Dict
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.chat_models.openai import ChatOpenAI
 from langchain_community.utils.openai import is_openai_v1
@@ -48,7 +48,7 @@ class ChatOctoAI(ChatOpenAI):
     def is_lc_serializable(cls) -> bool:
         return False
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["octoai_api_base"] = get_from_dict_or_env(

@@ -49,6 +49,7 @@ from langchain_core.runnables import Runnable
 from langchain_core.utils import (
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from langchain_community.adapters.openai import (
@@ -274,7 +275,7 @@ class ChatOpenAI(BaseChatModel):
         values["model_kwargs"] = extra
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         if values["n"] < 1:

@@ -40,9 +40,8 @@ from langchain_core.pydantic_v1 import (
     Extra,
     Field,
     SecretStr,
-    root_validator,
 )
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 if TYPE_CHECKING:
     from premai.api.chat_completions.v1_chat_completions_create import (
@@ -249,7 +248,7 @@ class ChatPremAI(BaseChatModel, BaseModel):
         allow_population_by_field_name = True
         arbitrary_types_allowed = True
-    @root_validator()
+    @pre_init
     def validate_environments(cls, values: Dict) -> Dict:
         """Validate that the package is installed and that the API token is valid"""
         try:

@@ -16,6 +16,7 @@ from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from langchain_core.utils.utils import build_extra_kwargs
@@ -135,7 +136,7 @@ class ChatSnowflakeCortex(BaseChatModel):
         )
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         try:
             from snowflake.snowpark import Session

@@ -3,8 +3,8 @@
 from typing import Dict
 from langchain_core._api import deprecated
-from langchain_core.pydantic_v1 import Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field
+from langchain_core.utils import get_from_dict_or_env, pre_init
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.llms.solar import SOLAR_SERVICE_URL_BASE, SolarCommon
@@ -37,7 +37,7 @@ class SolarChat(SolarCommon, ChatOpenAI):
         arbitrary_types_allowed = True
         extra = "ignore"
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that the environment is set up correctly."""
         values["solar_api_key"] = get_from_dict_or_env(

@@ -49,10 +49,10 @@ from langchain_core.outputs import (
     ChatGenerationChunk,
     ChatResult,
 )
-from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from requests.exceptions import HTTPError
 from tenacity import (
@@ -431,7 +431,7 @@ class ChatTongyi(BaseChatModel):
         """Return type of llm."""
         return "tongyi"
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["dashscope_api_key"] = convert_to_secret_str(

@@ -27,7 +27,7 @@ from langchain_core.messages import (
     SystemMessage,
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import root_validator
+from langchain_core.utils import pre_init
 from langchain_community.llms.vertexai import (
     _VertexAICommon,
@@ -225,7 +225,7 @@ class ChatVertexAI(_VertexAICommon, BaseChatModel):
         """Get the namespace of the langchain object."""
         return ["langchain", "chat_models", "vertexai"]
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that the python package exists in environment."""
         is_gemini = is_gemini_model(values["model_name"])

@@ -44,6 +44,7 @@ from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 from langchain_core.utils import (
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from tenacity import (
     before_sleep_log,
@@ -166,7 +167,7 @@ class ChatYuan2(BaseChatModel):
         values["model_kwargs"] = extra
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["yuan2_api_key"] = get_from_dict_or_env(

@@ -4,8 +4,8 @@ from __future__ import annotations
 from typing import Dict
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.utils.openai import is_openai_v1
@@ -34,7 +34,7 @@ class AnyscaleEmbeddings(OpenAIEmbeddings):
             "anyscale_api_key": "ANYSCALE_API_KEY",
         }
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: dict) -> dict:
         """Validate that api key and python package exists in environment."""
         values["anyscale_api_key"] = convert_to_secret_str(

@@ -4,8 +4,8 @@ import logging
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 logger = logging.getLogger(__name__)
@@ -48,7 +48,7 @@ class QianfanEmbeddingsEndpoint(BaseModel, Embeddings):
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """extra params for model invoke using with `do`."""
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """
         Validate whether qianfan_ak and qianfan_sk in the environment variables or

@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Mapping, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import get_from_dict_or_env, pre_init
 DEFAULT_MODEL_ID = "sentence-transformers/clip-ViT-B-32"
 MAX_BATCH_SIZE = 1024
@@ -59,7 +59,7 @@ class DeepInfraEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         deepinfra_api_token = get_from_dict_or_env(

@@ -6,9 +6,8 @@ from langchain_core.pydantic_v1 import (
     Extra,
     Field,
     SecretStr,
-    root_validator,
 )
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.utilities.requests import Requests
@@ -35,7 +34,7 @@ class EdenAiEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         values["edenai_api_key"] = convert_to_secret_str(

@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Mapping, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from requests.adapters import HTTPAdapter, Retry
 from typing_extensions import NotRequired, TypedDict
@@ -61,7 +61,7 @@ class EmbaasEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         embaas_api_key = convert_to_secret_str(

@@ -6,9 +6,9 @@ from typing import Dict, List, Optional
 import requests
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables.config import run_in_executor
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 logger = logging.getLogger(__name__)
@@ -31,7 +31,7 @@ class ErnieEmbeddings(BaseModel, Embeddings):
     _lock = threading.Lock()
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         values["ernie_api_base"] = get_from_dict_or_env(
             values, "ernie_api_base", "ERNIE_API_BASE", "https://aip.baidubce.com"

@@ -2,7 +2,8 @@ from typing import Any, Dict, List, Literal, Optional
 import numpy as np
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import pre_init
 class FastEmbedEmbeddings(BaseModel, Embeddings):
@@ -54,7 +55,7 @@ class FastEmbedEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that FastEmbed has been installed."""
         model_name = values.get("model_name")

@@ -5,7 +5,8 @@ from functools import cached_property
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import pre_init
 logger = logging.getLogger(__name__)
@@ -77,7 +78,7 @@ class GigaChatEmbeddings(BaseModel, Embeddings):
             key_file_password=self.key_file_password,
         )
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate authenticate data in environment and python package is installed."""
         try:

@@ -4,8 +4,8 @@ import logging
 from typing import Any, Callable, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -62,7 +62,7 @@ class GooglePalmEmbeddings(BaseModel, Embeddings):
     show_progress_bar: bool = False
     """Whether to show a tqdm progress bar. Must have `tqdm` installed."""
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key, python package exists."""
         google_api_key = get_from_dict_or_env(

@@ -2,7 +2,8 @@ from typing import Any, Dict, List, Optional
 import numpy as np
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import pre_init
 LASER_MULTILINGUAL_MODEL: str = "laser2"
@@ -41,7 +42,7 @@ class LaserEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that laser_encoders has been installed."""
         try:

@@ -4,8 +4,8 @@ from typing import Dict, List, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 class LLMRailsEmbeddings(BaseModel, Embeddings):
@@ -37,7 +37,7 @@ class LLMRailsEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         api_key = convert_to_secret_str(

@@ -17,7 +17,11 @@ from typing import (
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
+from langchain_core.utils import (
+    get_from_dict_or_env,
+    get_pydantic_field_names,
+    pre_init,
+)
 from tenacity import (
     AsyncRetrying,
     before_sleep_log,
@@ -193,7 +197,7 @@ class LocalAIEmbeddings(BaseModel, Embeddings):
         values["model_kwargs"] = extra
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["openai_api_key"] = get_from_dict_or_env(

@@ -5,8 +5,8 @@ from typing import Any, Callable, Dict, List, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -84,7 +84,7 @@ class MiniMaxEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that group id and api key exists in environment."""
         minimax_group_id = get_from_dict_or_env(

@@ -8,7 +8,8 @@ import aiohttp
 import requests
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import pre_init
 def is_endpoint_live(url: str, headers: Optional[dict], payload: Any) -> bool:
@@ -58,7 +59,7 @@ class NeMoEmbeddings(BaseModel, Embeddings):
     model: str = "NV-Embed-QA-003"
     api_endpoint_url: str = "http://localhost:8088/v1/embeddings"
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that the end point is alive using the values that are provided."""

@@ -1,8 +1,8 @@
 from typing import Any, Dict, List
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import get_from_dict_or_env, pre_init
 class NLPCloudEmbeddings(BaseModel, Embeddings):
@@ -30,7 +30,7 @@ class NLPCloudEmbeddings(BaseModel, Embeddings):
     ) -> None:
         super().__init__(model_name=model_name, gpu=gpu, **kwargs)
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         nlpcloud_api_key = get_from_dict_or_env(

@@ -2,7 +2,8 @@ from enum import Enum
 from typing import Any, Dict, Iterator, List, Mapping, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import pre_init
 CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint"
@@ -89,7 +90,7 @@ class OCIGenAIEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict: # pylint: disable=no-self-argument
         """Validate that OCI config and python package exists in environment."""

@@ -1,7 +1,7 @@
 from typing import Dict
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.utils.openai import is_openai_v1
@@ -38,7 +38,7 @@ class OctoAIEmbeddings(OpenAIEmbeddings):
     def lc_secrets(self) -> Dict[str, str]:
         return {"octoai_api_token": "OCTOAI_API_TOKEN"}
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: dict) -> dict:
         """Validate that api key and python package exists in environment."""
         values["endpoint_url"] = get_from_dict_or_env(

@@ -22,7 +22,11 @@ import numpy as np
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
+from langchain_core.utils import (
+    get_from_dict_or_env,
+    get_pydantic_field_names,
+    pre_init,
+)
 from tenacity import (
     AsyncRetrying,
     before_sleep_log,
@@ -282,7 +286,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
         values["model_kwargs"] = extra
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["openai_api_key"] = get_from_dict_or_env(

@@ -5,8 +5,8 @@ from typing import Any, Callable, Dict, List, Optional, Union
 from langchain_core.embeddings import Embeddings
 from langchain_core.language_models.llms import create_base_retry_decorator
-from langchain_core.pydantic_v1 import BaseModel, SecretStr, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, SecretStr
+from langchain_core.utils import get_from_dict_or_env, pre_init
 logger = logging.getLogger(__name__)
@@ -32,7 +32,7 @@ class PremAIEmbeddings(BaseModel, Embeddings):
     client: Any
-    @root_validator()
+    @pre_init
     def validate_environments(cls, values: Dict) -> Dict:
         """Validate that the package is installed and that the API token is valid"""
         try:

@@ -1,7 +1,8 @@
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import pre_init
 from langchain_community.llms.sagemaker_endpoint import ContentHandlerBase
@@ -115,7 +116,7 @@ class SagemakerEndpointEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
         arbitrary_types_allowed = True
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Dont do anything if client provided externally"""
         if values.get("client") is not None:

@@ -3,8 +3,8 @@ from typing import Dict, Generator, List, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import get_from_dict_or_env, pre_init
 class SambaStudioEmbeddings(BaseModel, Embeddings):
@@ -64,7 +64,7 @@ class SambaStudioEmbeddings(BaseModel, Embeddings):
     batch_size: int = 32
     """Batch size for the embedding models"""
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["sambastudio_embeddings_base_url"] = get_from_dict_or_env(

@@ -6,8 +6,8 @@ from typing import Any, Callable, Dict, List, Optional
 import requests
 from langchain_core._api import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -80,7 +80,7 @@ class SolarEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key exists in environment."""
         solar_api_key = convert_to_secret_str(

@@ -8,7 +8,7 @@ from typing import Any, Dict, List, Literal, Optional, Tuple
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
 from langchain_core.language_models.llms import create_base_retry_decorator
-from langchain_core.pydantic_v1 import root_validator
+from langchain_core.utils import pre_init
 from langchain_community.llms.vertexai import _VertexAICommon
 from langchain_community.utilities.vertexai import raise_vertex_import_error
@@ -33,7 +33,7 @@ class VertexAIEmbeddings(_VertexAICommon, Embeddings):
     show_progress_bar: bool = False
     """Whether to show a tqdm progress bar. Must have `tqdm` installed."""
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validates that the python package exists in environment."""
         cls._try_init_vertexai(values)

@@ -4,8 +4,8 @@ import logging
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import get_from_dict_or_env, pre_init
 logger = logging.getLogger(__name__)
@@ -43,7 +43,7 @@ class VolcanoEmbeddings(BaseModel, Embeddings):
     client: Any
     """volcano client"""
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """
         Validate whether volcano_ak and volcano_sk in the environment variables or

@@ -7,8 +7,8 @@ import time
 from typing import Any, Callable, Dict, List, Sequence
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -76,7 +76,7 @@ class YandexGPTEmbeddings(BaseModel, Embeddings):
         allow_population_by_field_name = True
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that iam token exists in environment."""

@@ -3,8 +3,8 @@ from typing import Any, Dict, List, Optional, cast
 import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 class AI21PenaltyData(BaseModel):
@@ -73,7 +73,7 @@ class AI21(LLM):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         ai21_api_key = convert_to_secret_str(

@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Optional, Sequence
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
-from langchain_core.pydantic_v1 import Extra, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.llms.utils import enforce_stop_tokens
@@ -129,7 +129,7 @@ class AlephAlpha(LLM):
     """Stop sequences to use."""
     # Client params
-    aleph_alpha_api_key: Optional[str] = None
+    aleph_alpha_api_key: Optional[SecretStr] = None
     """API key for Aleph Alpha API."""
     host: str = "https://api.aleph-alpha.com"
     """The hostname of the API host.
@@ -167,7 +167,7 @@ class AlephAlpha(LLM):
         extra = Extra.forbid
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["aleph_alpha_api_key"] = convert_to_secret_str(

@@ -25,6 +25,7 @@ from langchain_core.utils import (
     check_package_version,
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from langchain_core.utils.utils import build_extra_kwargs, convert_to_secret_str
@@ -74,7 +75,7 @@ class _AnthropicCommon(BaseLanguageModel):
         )
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["anthropic_api_key"] = convert_to_secret_str(
@@ -185,7 +186,7 @@ class Anthropic(LLM, _AnthropicCommon):
         allow_population_by_field_name = True
         arbitrary_types_allowed = True
-    @root_validator()
+    @pre_init
     def raise_warning(cls, values: Dict) -> Dict:
         """Raise warning that this class is deprecated."""
         warnings.warn(

@@ -14,8 +14,8 @@ from langchain_core.callbacks import (
     CallbackManagerForLLMRun,
 )
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.llms.openai import (
     BaseOpenAI,
@@ -93,7 +93,7 @@ class Anyscale(BaseOpenAI):
     def is_lc_serializable(cls) -> bool:
         return False
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["anyscale_api_base"] = get_from_dict_or_env(

@@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models import BaseLLM
 from langchain_core.outputs import Generation, LLMResult
-from langchain_core.pydantic_v1 import Field, root_validator
+from langchain_core.pydantic_v1 import Field
+from langchain_core.utils import pre_init
 class Aphrodite(BaseLLM):
@@ -157,7 +158,7 @@ class Aphrodite(BaseLLM):
     client: Any  #: :meta private:
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that python package exists in environment."""

@@ -7,8 +7,8 @@ from typing import Any, Dict, List, Optional
 import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.llms.utils import enforce_stop_tokens
@@ -31,7 +31,7 @@ class BaichuanLLM(LLM):
     baichuan_api_host: Optional[str] = None
     baichuan_api_key: Optional[SecretStr] = None
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         values["baichuan_api_key"] = convert_to_secret_str(
             get_from_dict_or_env(values, "baichuan_api_key", "BAICHUAN_API_KEY")

@@ -16,8 +16,8 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
-from langchain_core.pydantic_v1 import Field, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 logger = logging.getLogger(__name__)
@@ -76,7 +76,7 @@ class QianfanLLMEndpoint(LLM):
     In the case of other model, passing these params will not affect the result.
     """
-    @root_validator()
+    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
         values["qianfan_ak"] = convert_to_secret_str(
             get_from_dict_or_env(

@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Mapping, Optional, cast
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_community.llms.utils import enforce_stop_tokens
@@ -63,7 +63,7 @@ class Banana(LLM):
         values["model_kwargs"] = extra
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         banana_api_key = convert_to_secret_str(

@@ -10,7 +10,7 @@ import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 logger = logging.getLogger(__name__)
@@ -95,7 +95,7 @@ class Beam(LLM):
         values["model_kwargs"] = extra
         return values
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         beam_client_id = get_from_dict_or_env(

View File

@ -21,8 +21,8 @@ from langchain_core.callbacks import (
) )
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator from langchain_core.pydantic_v1 import BaseModel, Extra, Field
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
from langchain_community.utilities.anthropic import ( from langchain_community.utilities.anthropic import (
@ -389,7 +389,7 @@ class BedrockBase(BaseModel, ABC):
...Logic to handle guardrail intervention... ...Logic to handle guardrail intervention...
""" # noqa: E501 """ # noqa: E501
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that AWS credentials to and python package exists in environment.""" """Validate that AWS credentials to and python package exists in environment."""
@ -743,7 +743,7 @@ class Bedrock(LLM, BedrockBase):
""" """
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
model_id = values["model_id"] model_id = values["model_id"]
if model_id.startswith("anthropic.claude-3"): if model_id.startswith("anthropic.claude-3"):
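Not every migrated validator performs an environment lookup; the Bedrock validator above branches on model_id rather than reading credentials. A sketch of that gating style under @pre_init, with a hypothetical GatedModelLLM class and "provider.chat-only" prefix standing in for the real names:

from typing import Any, Dict, List, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import pre_init


class GatedModelLLM(LLM):
    """Hypothetical wrapper whose validator gates on configuration."""

    model_id: str = Field(default="provider.example-text-v1")

    @pre_init
    def validate_model_id(cls, values: Dict) -> Dict:
        # Reject ids this text-completion wrapper cannot serve, in the spirit
        # of the Bedrock branch on "anthropic.claude-3" above.
        if values["model_id"].startswith("provider.chat-only"):
            raise ValueError(
                "Chat-only models are not supported by this wrapper; "
                "use the corresponding chat model class instead."
            )
        return values

    @property
    def _llm_type(self) -> str:
        return "gated-model"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        return prompt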

View File

@ -5,7 +5,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -62,7 +62,7 @@ class CerebriumAI(LLM):
values["model_kwargs"] = extra values["model_kwargs"] = extra
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
cerebriumai_api_key = convert_to_secret_str( cerebriumai_api_key = convert_to_secret_str(

View File

@ -4,7 +4,8 @@ from typing import Any, Dict, List, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import Generation, LLMResult from langchain_core.outputs import Generation, LLMResult
from langchain_core.pydantic_v1 import Extra, Field, root_validator from langchain_core.pydantic_v1 import Extra, Field
from langchain_core.utils import pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -53,7 +54,7 @@ class Clarifai(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that we have all required info to access Clarifai """Validate that we have all required info to access Clarifai
platform and python package exists in environment.""" platform and python package exists in environment."""

View File

@ -10,8 +10,8 @@ from langchain_core.callbacks import (
) )
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.load.serializable import Serializable from langchain_core.load.serializable import Serializable
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Extra, Field, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from tenacity import ( from tenacity import (
before_sleep_log, before_sleep_log,
retry, retry,
@ -95,7 +95,7 @@ class BaseCohere(Serializable):
user_agent: str = "langchain" user_agent: str = "langchain"
"""Identifier for the application making the request.""" """Identifier for the application making the request."""
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
try: try:

View File

@ -6,7 +6,7 @@ from langchain_core.callbacks import (
CallbackManagerForLLMRun, CallbackManagerForLLMRun,
) )
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import root_validator from langchain_core.utils import pre_init
class CTransformers(LLM): class CTransformers(LLM):
@ -57,7 +57,7 @@ class CTransformers(LLM):
"""Return type of llm.""" """Return type of llm."""
return "ctransformers" return "ctransformers"
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that ``ctransformers`` package is installed.""" """Validate that ``ctransformers`` package is installed."""
try: try:

View File

@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional, Union
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import BaseLLM from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, LLMResult from langchain_core.outputs import Generation, LLMResult
from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.pydantic_v1 import Field
from langchain_core.utils import pre_init
class CTranslate2(BaseLLM): class CTranslate2(BaseLLM):
@ -50,7 +51,7 @@ class CTranslate2(BaseLLM):
explicitly specified. explicitly specified.
""" """
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that python package exists in environment.""" """Validate that python package exists in environment."""

View File

@ -8,8 +8,8 @@ from langchain_core.callbacks import (
) )
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_community.utilities.requests import Requests from langchain_community.utilities.requests import Requests
@ -43,7 +43,7 @@ class DeepInfra(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
deepinfra_api_token = get_from_dict_or_env( deepinfra_api_token = get_from_dict_or_env(

View File

@ -1,12 +1,18 @@
# flake8: noqa # flake8: noqa
from langchain_core.utils import pre_init
from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Union from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Union
from langchain_core.utils import pre_init
from langchain_core.pydantic_v1 import root_validator from langchain_core.pydantic_v1 import root_validator
from langchain_core.utils import pre_init
from langchain_core.callbacks import ( from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun, AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun, CallbackManagerForLLMRun,
) )
from langchain_core.utils import pre_init
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.utils import pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
from langchain_core.utils import pre_init
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
@ -55,7 +61,7 @@ class DeepSparse(LLM):
"""Return type of llm.""" """Return type of llm."""
return "deepsparse" return "deepsparse"
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that ``deepsparse`` package is installed.""" """Validate that ``deepsparse`` package is installed."""
try: try:

View File

@ -10,7 +10,7 @@ from langchain_core.callbacks import (
) )
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, root_validator from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
from langchain_community.utilities.requests import Requests from langchain_community.utilities.requests import Requests
@ -73,7 +73,7 @@ class EdenAI(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key exists in environment.""" """Validate that api key exists in environment."""
values["edenai_api_key"] = get_from_dict_or_env( values["edenai_api_key"] = get_from_dict_or_env(

View File

@ -3,7 +3,8 @@ from typing import Any, Dict, Iterator, List, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LLM from langchain_core.language_models import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.pydantic_v1 import Field
from langchain_core.utils import pre_init
class ExLlamaV2(LLM): class ExLlamaV2(LLM):
@ -58,7 +59,7 @@ class ExLlamaV2(LLM):
disallowed_tokens: List[int] = Field(None) disallowed_tokens: List[int] = Field(None)
"""List of tokens to disallow during generation.""" """List of tokens to disallow during generation."""
@root_validator() @pre_init
def validate_environment(cls, values: Dict[str, Any]) -> Dict[str, Any]: def validate_environment(cls, values: Dict[str, Any]) -> Dict[str, Any]:
try: try:
import torch import torch

View File

@ -9,8 +9,8 @@ from langchain_core.callbacks import (
) )
from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Field, SecretStr
from langchain_core.utils import convert_to_secret_str from langchain_core.utils import convert_to_secret_str, pre_init
from langchain_core.utils.env import get_from_dict_or_env from langchain_core.utils.env import get_from_dict_or_env
@ -61,7 +61,7 @@ class Fireworks(BaseLLM):
"""Get the namespace of the langchain object.""" """Get the namespace of the langchain object."""
return ["langchain", "llms", "fireworks"] return ["langchain", "llms", "fireworks"]
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key in environment.""" """Validate that api key in environment."""
try: try:

View File

@ -10,7 +10,8 @@ from langchain_core.callbacks.manager import (
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.load.serializable import Serializable from langchain_core.load.serializable import Serializable
from langchain_core.outputs import GenerationChunk, LLMResult from langchain_core.outputs import GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Field, SecretStr
from langchain_core.utils import pre_init
from langchain_core.utils.env import get_from_dict_or_env from langchain_core.utils.env import get_from_dict_or_env
from langchain_core.utils.utils import convert_to_secret_str from langchain_core.utils.utils import convert_to_secret_str
@ -66,7 +67,7 @@ class BaseFriendli(Serializable):
# is used by default. # is used by default.
top_p: Optional[float] = None top_p: Optional[float] = None
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate if personal access token is provided in environment.""" """Validate if personal access token is provided in environment."""
try: try:

View File

@ -11,7 +11,7 @@ from langchain_core.callbacks import (
from langchain_core.language_models.llms import BaseLLM from langchain_core.language_models.llms import BaseLLM
from langchain_core.load.serializable import Serializable from langchain_core.load.serializable import Serializable
from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import root_validator from langchain_core.utils import pre_init
if TYPE_CHECKING: if TYPE_CHECKING:
import gigachat import gigachat
@ -113,7 +113,7 @@ class _BaseGigaChat(Serializable):
verbose=self.verbose, verbose=self.verbose,
) )
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate authenticate data in environment and python package is installed.""" """Validate authenticate data in environment and python package is installed."""
try: try:

View File

@ -6,8 +6,8 @@ from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LanguageModelInput from langchain_core.language_models import LanguageModelInput
from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import BaseModel, SecretStr, root_validator from langchain_core.pydantic_v1 import BaseModel, SecretStr
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_community.llms import BaseLLM from langchain_community.llms import BaseLLM
from langchain_community.utilities.vertexai import create_retry_decorator from langchain_community.utilities.vertexai import create_retry_decorator
@ -107,7 +107,7 @@ class GooglePalm(BaseLLM, BaseModel):
"""Get the namespace of the langchain object.""" """Get the namespace of the langchain object."""
return ["langchain", "llms", "google_palm"] return ["langchain", "llms", "google_palm"]
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate api key, python package exists.""" """Validate api key, python package exists."""
google_api_key = get_from_dict_or_env( google_api_key = get_from_dict_or_env(

View File

@ -4,7 +4,7 @@ from typing import Any, Dict, List, Mapping, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -86,7 +86,7 @@ class GooseAI(LLM):
values["model_kwargs"] = extra values["model_kwargs"] = extra
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
gooseai_api_key = convert_to_secret_str( gooseai_api_key = convert_to_secret_str(

View File

@ -3,7 +3,8 @@ from typing import Any, Dict, List, Mapping, Optional, Set
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, root_validator from langchain_core.pydantic_v1 import Extra, Field
from langchain_core.utils import pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -127,7 +128,7 @@ class GPT4All(LLM):
"streaming": self.streaming, "streaming": self.streaming,
} }
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the python package exists in the environment.""" """Validate that the python package exists in the environment."""
try: try:

View File

@ -10,7 +10,11 @@ from langchain_core.callbacks import (
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, Field, root_validator from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names from langchain_core.utils import (
get_from_dict_or_env,
get_pydantic_field_names,
pre_init,
)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -162,7 +166,7 @@ class HuggingFaceEndpoint(LLM):
values["model"] = values.get("endpoint_url") or values.get("repo_id") values["model"] = values.get("endpoint_url") or values.get("repo_id")
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that package is installed and that the API token is valid.""" """Validate that package is installed and that the API token is valid."""
try: try:

View File

@ -4,8 +4,8 @@ from typing import Any, Dict, List, Mapping, Optional
from langchain_core._api.deprecation import deprecated from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -61,7 +61,7 @@ class HuggingFaceHub(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
huggingfacehub_api_token = get_from_dict_or_env( huggingfacehub_api_token = get_from_dict_or_env(

View File

@ -9,7 +9,7 @@ from langchain_core.callbacks import (
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, Field, root_validator from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.utils import get_pydantic_field_names from langchain_core.utils import get_pydantic_field_names, pre_init
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -134,7 +134,7 @@ class HuggingFaceTextGenInference(LLM):
values["model_kwargs"] = extra values["model_kwargs"] = extra
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that python package exists in environment.""" """Validate that python package exists in environment."""

View File

@ -8,7 +8,7 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import get_pydantic_field_names from langchain_core.utils import get_pydantic_field_names, pre_init
from langchain_core.utils.utils import build_extra_kwargs from langchain_core.utils.utils import build_extra_kwargs
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -133,7 +133,7 @@ class LlamaCpp(LLM):
verbose: bool = True verbose: bool = True
"""Print verbose output to stderr.""" """Print verbose output to stderr."""
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that llama-cpp-python library is installed.""" """Validate that llama-cpp-python library is installed."""
try: try:

View File

@ -2,7 +2,8 @@ from typing import Any, Dict, List, Mapping, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import pre_init
class ManifestWrapper(LLM): class ManifestWrapper(LLM):
@ -16,7 +17,7 @@ class ManifestWrapper(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that python package exists in environment.""" """Validate that python package exists in environment."""
try: try:

View File

@ -16,7 +16,7 @@ from langchain_core.callbacks import (
) )
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -72,7 +72,7 @@ class MinimaxCommon(BaseModel):
minimax_group_id: Optional[str] = None minimax_group_id: Optional[str] = None
minimax_api_key: Optional[SecretStr] = None minimax_api_key: Optional[SecretStr] = None
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
values["minimax_api_key"] = convert_to_secret_str( values["minimax_api_key"] = convert_to_secret_str(

View File

@ -4,7 +4,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LLM from langchain_core.language_models import LLM
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -79,7 +79,7 @@ class MoonshotCommon(BaseModel):
""" """
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
values["moonshot_api_key"] = convert_to_secret_str( values["moonshot_api_key"] = convert_to_secret_str(

View File

@ -3,8 +3,8 @@ from typing import Any, Dict, List, Mapping, Optional
import requests import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -64,7 +64,7 @@ class MosaicML(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
mosaicml_api_token = get_from_dict_or_env( mosaicml_api_token = get_from_dict_or_env(

View File

@ -2,8 +2,8 @@ from typing import Any, Dict, List, Mapping, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator from langchain_core.pydantic_v1 import Extra, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
class NLPCloud(LLM): class NLPCloud(LLM):
@ -56,7 +56,7 @@ class NLPCloud(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
values["nlpcloud_api_key"] = convert_to_secret_str( values["nlpcloud_api_key"] = convert_to_secret_str(

View File

@ -4,8 +4,8 @@ from typing import Any, Dict, List, Optional
import requests import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.pydantic_v1 import Field
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -47,7 +47,7 @@ class OCIModelDeploymentLLM(LLM):
"""Stop words to use when generating. Model output is cut off """Stop words to use when generating. Model output is cut off
at the first occurrence of any of these substrings.""" at the first occurrence of any of these substrings."""
@root_validator() @pre_init
def validate_environment( # pylint: disable=no-self-argument def validate_environment( # pylint: disable=no-self-argument
cls, values: Dict cls, values: Dict
) -> Dict: ) -> Dict:

View File

@ -8,7 +8,8 @@ from typing import Any, Dict, Iterator, List, Mapping, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator from langchain_core.pydantic_v1 import BaseModel, Extra
from langchain_core.utils import pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -99,7 +100,7 @@ class OCIGenAIBase(BaseModel, ABC):
is_stream: bool = False is_stream: bool = False
"""Whether to stream back partial progress""" """Whether to stream back partial progress"""
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that OCI config and python package exists in environment.""" """Validate that OCI config and python package exists in environment."""

View File

@ -1,7 +1,7 @@
from typing import Any, Dict from typing import Any, Dict
from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Field, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.openai import BaseOpenAI from langchain_community.llms.openai import BaseOpenAI
from langchain_community.utils.openai import is_openai_v1 from langchain_community.utils.openai import is_openai_v1
@ -66,7 +66,7 @@ class OctoAIEndpoint(BaseOpenAI):
"""Return type of llm.""" """Return type of llm."""
return "octoai_endpoint" return "octoai_endpoint"
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
values["octoai_api_base"] = get_from_dict_or_env( values["octoai_api_base"] = get_from_dict_or_env(

View File

@ -5,8 +5,8 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import BaseLanguageModel from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.messages import AIMessage from langchain_core.messages import AIMessage
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -38,7 +38,7 @@ class OpaquePrompts(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validates that the OpaquePrompts API key and the Python package exist.""" """Validates that the OpaquePrompts API key and the Python package exist."""
try: try:

View File

@ -29,7 +29,11 @@ from langchain_core.callbacks import (
from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names from langchain_core.utils import (
get_from_dict_or_env,
get_pydantic_field_names,
pre_init,
)
from langchain_core.utils.utils import build_extra_kwargs from langchain_core.utils.utils import build_extra_kwargs
from langchain_community.utils.openai import is_openai_v1 from langchain_community.utils.openai import is_openai_v1
@ -269,7 +273,7 @@ class BaseOpenAI(BaseLLM):
) )
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
if values["n"] < 1: if values["n"] < 1:
@ -818,7 +822,7 @@ class AzureOpenAI(BaseOpenAI):
"""Get the namespace of the langchain object.""" """Get the namespace of the langchain object."""
return ["langchain", "llms", "openai"] return ["langchain", "llms", "openai"]
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
if values["n"] < 1: if values["n"] < 1:
@ -1025,7 +1029,7 @@ class OpenAIChat(BaseLLM):
values["model_kwargs"] = extra values["model_kwargs"] = extra
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
openai_api_key = get_from_dict_or_env( openai_api_key = get_from_dict_or_env(
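Several of the validators above, notably in BaseOpenAI and AzureOpenAI, read populated fields such as values["n"] directly rather than looking anything up. A short sketch of that style, using a hypothetical BoundedSamplesLLM class:

from typing import Any, Dict, List, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import pre_init


class BoundedSamplesLLM(LLM):
    """Hypothetical wrapper whose validator reads a defaulted field."""

    n: int = Field(default=1)
    """Number of completions to request per prompt."""

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        # Reads values["n"] directly, so the field default is assumed to be
        # present in the values dict when this runs; the BaseOpenAI and
        # AzureOpenAI validators above make the same assumption.
        if values["n"] < 1:
            raise ValueError("n must be at least 1.")
        return values

    @property
    def _llm_type(self) -> str:
        return "bounded-samples"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        return prompt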

View File

@ -1,6 +1,6 @@
from typing import Any, Dict from typing import Any, Dict
from langchain_core.pydantic_v1 import root_validator from langchain_core.utils import pre_init
from langchain_community.llms.openai import BaseOpenAI from langchain_community.llms.openai import BaseOpenAI
@ -16,7 +16,7 @@ class OpenLM(BaseOpenAI):
def _invocation_params(self) -> Dict[str, Any]: def _invocation_params(self) -> Dict[str, Any]:
return {**{"model": self.model_name}, **super()._invocation_params} return {**{"model": self.model_name}, **super()._invocation_params}
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
try: try:
import openlm import openlm

View File

@ -6,8 +6,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import root_validator from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_core.utils import get_from_dict_or_env
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -52,7 +51,7 @@ class PaiEasEndpoint(LLM):
version: Optional[str] = "2.0" version: Optional[str] = "2.0"
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
values["eas_service_url"] = get_from_dict_or_env( values["eas_service_url"] = get_from_dict_or_env(

View File

@ -4,7 +4,7 @@ from typing import Any, Dict, List, Mapping, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -86,7 +86,7 @@ class Petals(LLM):
values["model_kwargs"] = extra values["model_kwargs"] = extra
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
huggingface_api_key = convert_to_secret_str( huggingface_api_key = convert_to_secret_str(

View File

@ -10,7 +10,7 @@ from langchain_core.pydantic_v1 import (
SecretStr, SecretStr,
root_validator, root_validator,
) )
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -65,7 +65,7 @@ class PipelineAI(LLM, BaseModel):
values["pipeline_kwargs"] = extra values["pipeline_kwargs"] = extra
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
pipeline_api_key = convert_to_secret_str( pipeline_api_key = convert_to_secret_str(

View File

@ -3,8 +3,8 @@ from typing import Any, Dict, List, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -53,7 +53,7 @@ class PredictionGuard(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the access token and python package exists in environment.""" """Validate that the access token and python package exists in environment."""
token = get_from_dict_or_env(values, "token", "PREDICTIONGUARD_TOKEN") token = get_from_dict_or_env(values, "token", "PREDICTIONGUARD_TOKEN")

View File

@ -7,7 +7,7 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, Field, root_validator from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
if TYPE_CHECKING: if TYPE_CHECKING:
from replicate.prediction import Prediction from replicate.prediction import Prediction
@ -97,7 +97,7 @@ class Replicate(LLM):
values["model_kwargs"] = extra values["model_kwargs"] = extra
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
replicate_api_token = get_from_dict_or_env( replicate_api_token = get_from_dict_or_env(

View File

@ -8,7 +8,8 @@ from typing import Any, Dict, List, Mapping, Optional, Set
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator from langchain_core.pydantic_v1 import BaseModel, Extra
from langchain_core.utils import pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -98,7 +99,7 @@ class RWKV(LLM, BaseModel):
"verbose", "verbose",
} }
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the python package exists in the environment.""" """Validate that the python package exists in the environment."""
try: try:

View File

@ -7,7 +7,8 @@ from typing import Any, Dict, Generic, Iterator, List, Mapping, Optional, TypeVa
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -249,7 +250,7 @@ class SagemakerEndpoint(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Dont do anything if client provided externally""" """Dont do anything if client provided externally"""
if values.get("client") is not None: if values.get("client") is not None:

View File

@ -5,8 +5,8 @@ import requests
from langchain_core.callbacks.manager import CallbackManagerForLLMRun from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
class SVEndpointHandler: class SVEndpointHandler:
@ -218,7 +218,7 @@ class Sambaverse(LLM):
def is_lc_serializable(cls) -> bool: def is_lc_serializable(cls) -> bool:
return True return True
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key exists in environment.""" """Validate that api key exists in environment."""
values["sambaverse_url"] = get_from_dict_or_env( values["sambaverse_url"] = get_from_dict_or_env(
@ -731,7 +731,7 @@ class SambaStudio(LLM):
"""Return type of llm.""" """Return type of llm."""
return "Sambastudio LLM" return "Sambastudio LLM"
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
values["sambastudio_base_url"] = get_from_dict_or_env( values["sambastudio_base_url"] = get_from_dict_or_env(

View File

@ -4,7 +4,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LLM from langchain_core.language_models import LLM
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -68,7 +68,7 @@ class SolarCommon(BaseModel):
def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]: def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
api_key = get_from_dict_or_env(values, "solar_api_key", "SOLAR_API_KEY") api_key = get_from_dict_or_env(values, "solar_api_key", "SOLAR_API_KEY")
if api_key is None or len(api_key) == 0: if api_key is None or len(api_key) == 0:

View File

@ -17,8 +17,8 @@ from wsgiref.handlers import format_date_time
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.pydantic_v1 import Field
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -54,7 +54,7 @@ class SparkLLM(LLM):
top_k: int = 4 top_k: int = 4
model_kwargs: Dict[str, Any] = Field(default_factory=dict) model_kwargs: Dict[str, Any] = Field(default_factory=dict)
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
values["spark_app_id"] = get_from_dict_or_env( values["spark_app_id"] = get_from_dict_or_env(
values, values,

View File

@ -6,7 +6,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -58,7 +58,7 @@ class StochasticAI(LLM):
values["model_kwargs"] = extra values["model_kwargs"] = extra
return values return values
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key exists in environment.""" """Validate that api key exists in environment."""
stochasticai_api_key = convert_to_secret_str( stochasticai_api_key = convert_to_secret_str(

View File

@ -5,8 +5,8 @@ from typing import Any, Callable, Dict, List, Mapping, Optional
import requests import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator from langchain_core.pydantic_v1 import Extra, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from requests import ConnectTimeout, ReadTimeout, RequestException from requests import ConnectTimeout, ReadTimeout, RequestException
from tenacity import ( from tenacity import (
before_sleep_log, before_sleep_log,
@ -65,7 +65,7 @@ class Nebula(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
nebula_service_url = get_from_dict_or_env( nebula_service_url = get_from_dict_or_env(

View File

@ -24,8 +24,8 @@ from langchain_core.callbacks import (
) )
from langchain_core.language_models.llms import BaseLLM from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.pydantic_v1 import Field
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from requests.exceptions import HTTPError from requests.exceptions import HTTPError
from tenacity import ( from tenacity import (
before_sleep_log, before_sleep_log,
@ -198,7 +198,7 @@ class Tongyi(BaseLLM):
"""Return type of llm.""" """Return type of llm."""
return "tongyi" return "tongyi"
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment.""" """Validate that api key and python package exists in environment."""
values["dashscope_api_key"] = get_from_dict_or_env( values["dashscope_api_key"] = get_from_dict_or_env(

View File

@ -10,7 +10,8 @@ from langchain_core.callbacks.manager import (
) )
from langchain_core.language_models.llms import BaseLLM from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import BaseModel, Field, root_validator from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_core.utils import pre_init
from langchain_community.utilities.vertexai import ( from langchain_community.utilities.vertexai import (
create_retry_decorator, create_retry_decorator,
@ -222,7 +223,7 @@ class VertexAI(_VertexAICommon, BaseLLM):
"""Get the namespace of the langchain object.""" """Get the namespace of the langchain object."""
return ["langchain", "llms", "vertexai"] return ["langchain", "llms", "vertexai"]
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the python package exists in environment.""" """Validate that the python package exists in environment."""
tuned_model_name = values.get("tuned_model_name") tuned_model_name = values.get("tuned_model_name")
@ -409,7 +410,7 @@ class VertexAIModelGarden(_VertexAIBase, BaseLLM):
"Set result_arg to None if output of the model is expected to be a string." "Set result_arg to None if output of the model is expected to be a string."
"Otherwise, if it's a dict, provided an argument that contains the result." "Otherwise, if it's a dict, provided an argument that contains the result."
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the python package exists in environment.""" """Validate that the python package exists in environment."""
try: try:

View File

@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import BaseLLM from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, LLMResult from langchain_core.outputs import Generation, LLMResult
from langchain_core.pydantic_v1 import Field, root_validator from langchain_core.pydantic_v1 import Field
from langchain_core.utils import pre_init
from langchain_community.llms.openai import BaseOpenAI from langchain_community.llms.openai import BaseOpenAI
from langchain_community.utils.openai import is_openai_v1 from langchain_community.utils.openai import is_openai_v1
@ -73,7 +74,7 @@ class VLLM(BaseLLM):
client: Any #: :meta private: client: Any #: :meta private:
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that python package exists in environment.""" """Validate that python package exists in environment."""

View File

@ -5,8 +5,8 @@ from typing import Any, Dict, Iterator, List, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
class VolcEngineMaasBase(BaseModel): class VolcEngineMaasBase(BaseModel):
@ -52,7 +52,7 @@ class VolcEngineMaasBase(BaseModel):
"""Timeout for read response from volc engine maas endpoint. """Timeout for read response from volc engine maas endpoint.
Default is 60 seconds.""" Default is 60 seconds."""
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
volc_engine_maas_ak = convert_to_secret_str( volc_engine_maas_ak = convert_to_secret_str(
get_from_dict_or_env(values, "volc_engine_maas_ak", "VOLC_ACCESSKEY") get_from_dict_or_env(values, "volc_engine_maas_ak", "VOLC_ACCESSKEY")

View File

@ -6,8 +6,8 @@ from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import BaseLLM from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator from langchain_core.pydantic_v1 import Extra, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -115,7 +115,7 @@ class WatsonxLLM(BaseLLM):
"instance_id": "WATSONX_INSTANCE_ID", "instance_id": "WATSONX_INSTANCE_ID",
} }
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that credentials and python package exists in environment.""" """Validate that credentials and python package exists in environment."""
values["url"] = convert_to_secret_str( values["url"] = convert_to_secret_str(

View File

@ -3,8 +3,8 @@ from typing import Any, Dict, List, Mapping, Optional
import requests import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@ -69,7 +69,7 @@ class Writer(LLM):
extra = Extra.forbid extra = Extra.forbid
@root_validator() @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and organization id exist in environment.""" """Validate that api key and organization id exist in environment."""

Some files were not shown because too many files have changed in this diff.