Mirror of https://github.com/hwchase17/langchain.git, synced 2025-06-26 08:33:49 +00:00
core[minor],community[minor]: Upgrade all @root_validator() to @pre_init (#23841)
This PR introduces a @pre_init decorator that behaves like @root_validator(pre=True), but with all of the model's field defaults populated in the values dict before the validator runs.
parent f152d6ed3d
commit 2c180d645e
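The diff below imports pre_init from langchain_core.utils and swaps it in for @root_validator(). As a rough illustration of the idea described above, here is a minimal sketch of such a decorator, assuming the pydantic v1 API exposed through langchain_core.pydantic_v1; it is not the actual langchain_core implementation, and the Example model at the end is purely hypothetical.

from functools import wraps
from typing import Any, Callable, Dict

from langchain_core.pydantic_v1 import BaseModel, root_validator


def pre_init(func: Callable[..., Dict[str, Any]]) -> Any:
    """Sketch: run `func` as a pre root validator with field defaults filled in."""

    @root_validator(pre=True)
    @wraps(func)
    def wrapper(cls: Any, values: Dict[str, Any]) -> Dict[str, Any]:
        # Populate defaults for optional fields the caller did not supply,
        # so the wrapped validator can index values like values["n"] safely.
        for name, field in cls.__fields__.items():
            if name not in values and not field.required:
                values[name] = field.get_default()
        return func(cls, values)

    return wrapper


class Example(BaseModel):  # hypothetical model, for illustration only
    n: int = 1

    @pre_init
    def validate_environment(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # With a plain @root_validator(pre=True), values["n"] could raise
        # KeyError when the caller omits n; here the default is already set.
        if values["n"] < 1:
            raise ValueError("n must be at least 1")
        return values


Example()     # n falls back to its default of 1 before validation runs
Example(n=3)  # a caller-supplied value is validated as usual

As the hunks below show, each call site only changes its imports and replaces @root_validator() with @pre_init; the body of every validate_environment stays the same.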
@@ -9,8 +9,8 @@ from typing import Any, Callable, Dict, List, Union
 
 from langchain_core._api.deprecation import deprecated
 from langchain_core.outputs import ChatResult
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 from langchain_community.chat_models.openai import ChatOpenAI
 from langchain_community.utils.openai import is_openai_v1
@@ -106,7 +106,7 @@ class AzureChatOpenAI(ChatOpenAI):
         """Get the namespace of the langchain object."""
         return ["langchain", "chat_models", "azure_openai"]
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         if values["n"] < 1:
@@ -50,11 +50,10 @@ from langchain_core.pydantic_v1 import (
     Extra,
     Field,
     SecretStr,
-    root_validator,
 )
 from langchain_core.runnables import Runnable, RunnableMap, RunnablePassthrough
 from langchain_core.tools import BaseTool
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_core.utils.function_calling import convert_to_openai_tool
 
 from langchain_community.utilities.requests import Requests
@@ -300,7 +299,7 @@ class ChatEdenAI(BaseChatModel):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         values["edenai_api_key"] = convert_to_secret_str(
@@ -21,8 +21,8 @@ from langchain_core.outputs import (
     ChatGeneration,
     ChatResult,
 )
-from langchain_core.pydantic_v1 import BaseModel, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -261,7 +261,7 @@ class ChatGooglePalm(BaseChatModel, BaseModel):
         """Get the namespace of the langchain object."""
         return ["langchain", "chat_models", "google_palm"]
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key, python package exists, temperature, top_p, and top_k."""
         google_api_key = convert_to_secret_str(
@@ -29,6 +29,7 @@ from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 
 logger = logging.getLogger(__name__)
@@ -190,7 +191,7 @@ class ChatHunyuan(BaseChatModel):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         values["hunyuan_api_base"] = get_from_dict_or_env(
             values,
@@ -45,6 +45,7 @@ from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from tenacity import (
     before_sleep_log,
@@ -218,7 +219,7 @@ class JinaChat(BaseChatModel):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["jinachat_api_key"] = convert_to_secret_str(
@@ -11,6 +11,8 @@ from importlib.metadata import version
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast
 
+from langchain_core.utils import pre_init
+
 if TYPE_CHECKING:
     import gpudb
 
@@ -24,7 +26,7 @@ from langchain_core.messages import (
 )
 from langchain_core.output_parsers.transform import BaseOutputParser
 from langchain_core.outputs import ChatGeneration, ChatResult, Generation
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field
 
 LOG = logging.getLogger(__name__)
 
@@ -341,7 +343,7 @@ class ChatKinetica(BaseChatModel):
     kdbc: Any = Field(exclude=True)
     """ Kinetica DB connection. """
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Pydantic object validator."""
 
@@ -23,8 +23,8 @@ from langchain_core.callbacks import (
 )
 from langchain_core.messages import AIMessageChunk, BaseMessage
 from langchain_core.outputs import ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.adapters.openai import (
     convert_message_to_dict,
@@ -85,7 +85,7 @@ class ChatKonko(ChatOpenAI):
     max_tokens: int = 20
     """Maximum number of tokens to generate."""
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["konko_api_key"] = convert_to_secret_str(
@@ -48,10 +48,10 @@ from langchain_core.outputs import (
     ChatGenerationChunk,
     ChatResult,
 )
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 from langchain_core.utils.function_calling import convert_to_openai_tool
 
 logger = logging.getLogger(__name__)
@@ -249,7 +249,7 @@ class ChatLiteLLM(BaseChatModel):
 
         return _completion_with_retry(**kwargs)
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key, python package exists, temperature, top_p, and top_k."""
         try:
@@ -2,10 +2,10 @@
 
 from typing import Dict
 
-from langchain_core.pydantic_v1 import root_validator
 from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
+    pre_init,
 )
 
 from langchain_community.chat_models import ChatOpenAI
@@ -29,7 +29,7 @@ class MoonshotChat(MoonshotCommon, ChatOpenAI):  # type: ignore[misc]
             moonshot = MoonshotChat(model="moonshot-v1-8k")
    """
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that the environment is set up correctly."""
         values["moonshot_api_key"] = convert_to_secret_str(
@@ -2,8 +2,8 @@
 
 from typing import Dict
 
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.chat_models.openai import ChatOpenAI
 from langchain_community.utils.openai import is_openai_v1
@@ -48,7 +48,7 @@ class ChatOctoAI(ChatOpenAI):
     def is_lc_serializable(cls) -> bool:
         return False
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["octoai_api_base"] = get_from_dict_or_env(
@@ -49,6 +49,7 @@ from langchain_core.runnables import Runnable
 from langchain_core.utils import (
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 
 from langchain_community.adapters.openai import (
@@ -274,7 +275,7 @@ class ChatOpenAI(BaseChatModel):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         if values["n"] < 1:
@@ -40,9 +40,8 @@ from langchain_core.pydantic_v1 import (
     Extra,
     Field,
     SecretStr,
-    root_validator,
 )
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 if TYPE_CHECKING:
     from premai.api.chat_completions.v1_chat_completions_create import (
@@ -249,7 +248,7 @@ class ChatPremAI(BaseChatModel, BaseModel):
         allow_population_by_field_name = True
         arbitrary_types_allowed = True
 
-    @root_validator()
+    @pre_init
     def validate_environments(cls, values: Dict) -> Dict:
         """Validate that the package is installed and that the API token is valid"""
         try:
@@ -16,6 +16,7 @@ from langchain_core.utils import (
     convert_to_secret_str,
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from langchain_core.utils.utils import build_extra_kwargs
 
@@ -135,7 +136,7 @@ class ChatSnowflakeCortex(BaseChatModel):
         )
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         try:
             from snowflake.snowpark import Session
@@ -3,8 +3,8 @@
 from typing import Dict
 
 from langchain_core._api import deprecated
-from langchain_core.pydantic_v1 import Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.llms.solar import SOLAR_SERVICE_URL_BASE, SolarCommon
@@ -37,7 +37,7 @@ class SolarChat(SolarCommon, ChatOpenAI):
         arbitrary_types_allowed = True
         extra = "ignore"
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that the environment is set up correctly."""
         values["solar_api_key"] = get_from_dict_or_env(
@@ -49,10 +49,10 @@ from langchain_core.outputs import (
     ChatGenerationChunk,
     ChatResult,
 )
-from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from requests.exceptions import HTTPError
 from tenacity import (
@@ -431,7 +431,7 @@ class ChatTongyi(BaseChatModel):
         """Return type of llm."""
         return "tongyi"
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["dashscope_api_key"] = convert_to_secret_str(
@@ -27,7 +27,7 @@ from langchain_core.messages import (
     SystemMessage,
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import root_validator
+from langchain_core.utils import pre_init
 
 from langchain_community.llms.vertexai import (
     _VertexAICommon,
@@ -225,7 +225,7 @@ class ChatVertexAI(_VertexAICommon, BaseChatModel):
         """Get the namespace of the langchain object."""
         return ["langchain", "chat_models", "vertexai"]
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that the python package exists in environment."""
         is_gemini = is_gemini_model(values["model_name"])
@@ -44,6 +44,7 @@ from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 from langchain_core.utils import (
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from tenacity import (
     before_sleep_log,
@@ -166,7 +167,7 @@ class ChatYuan2(BaseChatModel):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["yuan2_api_key"] = get_from_dict_or_env(
@@ -4,8 +4,8 @@ from __future__ import annotations
 
 from typing import Dict
 
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.utils.openai import is_openai_v1
@@ -34,7 +34,7 @@ class AnyscaleEmbeddings(OpenAIEmbeddings):
             "anyscale_api_key": "ANYSCALE_API_KEY",
         }
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: dict) -> dict:
         """Validate that api key and python package exists in environment."""
         values["anyscale_api_key"] = convert_to_secret_str(
@@ -4,8 +4,8 @@ import logging
 from typing import Any, Dict, List, Optional
 
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 logger = logging.getLogger(__name__)
 
@@ -48,7 +48,7 @@ class QianfanEmbeddingsEndpoint(BaseModel, Embeddings):
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """extra params for model invoke using with `do`."""
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """
         Validate whether qianfan_ak and qianfan_sk in the environment variables or
@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Mapping, Optional
 
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 DEFAULT_MODEL_ID = "sentence-transformers/clip-ViT-B-32"
 MAX_BATCH_SIZE = 1024
@@ -59,7 +59,7 @@ class DeepInfraEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         deepinfra_api_token = get_from_dict_or_env(
@@ -6,9 +6,8 @@ from langchain_core.pydantic_v1 import (
     Extra,
     Field,
     SecretStr,
-    root_validator,
 )
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.utilities.requests import Requests
 
@@ -35,7 +34,7 @@ class EdenAiEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         values["edenai_api_key"] = convert_to_secret_str(
@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Mapping, Optional
 
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from requests.adapters import HTTPAdapter, Retry
 from typing_extensions import NotRequired, TypedDict
 
@@ -61,7 +61,7 @@ class EmbaasEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         embaas_api_key = convert_to_secret_str(
@@ -6,9 +6,9 @@ from typing import Dict, List, Optional
 import requests
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables.config import run_in_executor
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 logger = logging.getLogger(__name__)
 
@@ -31,7 +31,7 @@ class ErnieEmbeddings(BaseModel, Embeddings):
 
     _lock = threading.Lock()
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         values["ernie_api_base"] = get_from_dict_or_env(
             values, "ernie_api_base", "ERNIE_API_BASE", "https://aip.baidubce.com"
@@ -2,7 +2,8 @@ from typing import Any, Dict, List, Literal, Optional
 
 import numpy as np
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import pre_init
 
 
 class FastEmbedEmbeddings(BaseModel, Embeddings):
@@ -54,7 +55,7 @@ class FastEmbedEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that FastEmbed has been installed."""
         model_name = values.get("model_name")
@@ -5,7 +5,8 @@ from functools import cached_property
 from typing import Any, Dict, List, Optional
 
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import pre_init
 
 logger = logging.getLogger(__name__)
 
@@ -77,7 +78,7 @@ class GigaChatEmbeddings(BaseModel, Embeddings):
             key_file_password=self.key_file_password,
         )
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate authenticate data in environment and python package is installed."""
         try:
@@ -4,8 +4,8 @@ import logging
 from typing import Any, Callable, Dict, List, Optional
 
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -62,7 +62,7 @@ class GooglePalmEmbeddings(BaseModel, Embeddings):
     show_progress_bar: bool = False
     """Whether to show a tqdm progress bar. Must have `tqdm` installed."""
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key, python package exists."""
         google_api_key = get_from_dict_or_env(
@@ -2,7 +2,8 @@ from typing import Any, Dict, List, Optional
 
 import numpy as np
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import pre_init
 
 LASER_MULTILINGUAL_MODEL: str = "laser2"
 
@@ -41,7 +42,7 @@ class LaserEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that laser_encoders has been installed."""
         try:
@@ -4,8 +4,8 @@ from typing import Dict, List, Optional
 
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 
 class LLMRailsEmbeddings(BaseModel, Embeddings):
@@ -37,7 +37,7 @@ class LLMRailsEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         api_key = convert_to_secret_str(
@@ -17,7 +17,11 @@ from typing import (
 
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
+from langchain_core.utils import (
+    get_from_dict_or_env,
+    get_pydantic_field_names,
+    pre_init,
+)
 from tenacity import (
     AsyncRetrying,
     before_sleep_log,
@@ -193,7 +197,7 @@ class LocalAIEmbeddings(BaseModel, Embeddings):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["openai_api_key"] = get_from_dict_or_env(
@@ -5,8 +5,8 @@ from typing import Any, Callable, Dict, List, Optional
 
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -84,7 +84,7 @@ class MiniMaxEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that group id and api key exists in environment."""
         minimax_group_id = get_from_dict_or_env(
@@ -8,7 +8,8 @@ import aiohttp
 import requests
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import pre_init
 
 
 def is_endpoint_live(url: str, headers: Optional[dict], payload: Any) -> bool:
@@ -58,7 +59,7 @@ class NeMoEmbeddings(BaseModel, Embeddings):
     model: str = "NV-Embed-QA-003"
    api_endpoint_url: str = "http://localhost:8088/v1/embeddings"
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that the end point is alive using the values that are provided."""
 
@@ -1,8 +1,8 @@
 from typing import Any, Dict, List
 
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 
 class NLPCloudEmbeddings(BaseModel, Embeddings):
@@ -30,7 +30,7 @@ class NLPCloudEmbeddings(BaseModel, Embeddings):
     ) -> None:
         super().__init__(model_name=model_name, gpu=gpu, **kwargs)
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         nlpcloud_api_key = get_from_dict_or_env(
@@ -2,7 +2,8 @@ from enum import Enum
 from typing import Any, Dict, Iterator, List, Mapping, Optional
 
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import pre_init
 
 CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint"
 
@@ -89,7 +90,7 @@ class OCIGenAIEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:  # pylint: disable=no-self-argument
         """Validate that OCI config and python package exists in environment."""
 
@@ -1,7 +1,7 @@
 from typing import Dict
 
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.utils.openai import is_openai_v1
@@ -38,7 +38,7 @@ class OctoAIEmbeddings(OpenAIEmbeddings):
     def lc_secrets(self) -> Dict[str, str]:
         return {"octoai_api_token": "OCTOAI_API_TOKEN"}
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: dict) -> dict:
         """Validate that api key and python package exists in environment."""
         values["endpoint_url"] = get_from_dict_or_env(
@@ -22,7 +22,11 @@ import numpy as np
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
+from langchain_core.utils import (
+    get_from_dict_or_env,
+    get_pydantic_field_names,
+    pre_init,
+)
 from tenacity import (
     AsyncRetrying,
     before_sleep_log,
@@ -282,7 +286,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["openai_api_key"] = get_from_dict_or_env(
@@ -5,8 +5,8 @@ from typing import Any, Callable, Dict, List, Optional, Union
 
 from langchain_core.embeddings import Embeddings
 from langchain_core.language_models.llms import create_base_retry_decorator
-from langchain_core.pydantic_v1 import BaseModel, SecretStr, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, SecretStr
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 logger = logging.getLogger(__name__)
 
@@ -32,7 +32,7 @@ class PremAIEmbeddings(BaseModel, Embeddings):
 
     client: Any
 
-    @root_validator()
+    @pre_init
     def validate_environments(cls, values: Dict) -> Dict:
         """Validate that the package is installed and that the API token is valid"""
         try:
@@ -1,7 +1,8 @@
 from typing import Any, Dict, List, Optional
 
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.utils import pre_init
 
 from langchain_community.llms.sagemaker_endpoint import ContentHandlerBase
 
@@ -115,7 +116,7 @@ class SagemakerEndpointEmbeddings(BaseModel, Embeddings):
         extra = Extra.forbid
         arbitrary_types_allowed = True
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Dont do anything if client provided externally"""
         if values.get("client") is not None:
@@ -3,8 +3,8 @@ from typing import Dict, Generator, List, Optional
 
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 
 class SambaStudioEmbeddings(BaseModel, Embeddings):
@@ -64,7 +64,7 @@ class SambaStudioEmbeddings(BaseModel, Embeddings):
     batch_size: int = 32
     """Batch size for the embedding models"""
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["sambastudio_embeddings_base_url"] = get_from_dict_or_env(
@@ -6,8 +6,8 @@ from typing import Any, Callable, Dict, List, Optional
 import requests
 from langchain_core._api import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -80,7 +80,7 @@ class SolarEmbeddings(BaseModel, Embeddings):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key exists in environment."""
         solar_api_key = convert_to_secret_str(
@@ -8,7 +8,7 @@ from typing import Any, Dict, List, Literal, Optional, Tuple
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
 from langchain_core.language_models.llms import create_base_retry_decorator
-from langchain_core.pydantic_v1 import root_validator
+from langchain_core.utils import pre_init
 
 from langchain_community.llms.vertexai import _VertexAICommon
 from langchain_community.utilities.vertexai import raise_vertex_import_error
@@ -33,7 +33,7 @@ class VertexAIEmbeddings(_VertexAICommon, Embeddings):
     show_progress_bar: bool = False
     """Whether to show a tqdm progress bar. Must have `tqdm` installed."""
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validates that the python package exists in environment."""
         cls._try_init_vertexai(values)
@@ -4,8 +4,8 @@ import logging
 from typing import Any, Dict, List, Optional
 
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 logger = logging.getLogger(__name__)
 
@@ -43,7 +43,7 @@ class VolcanoEmbeddings(BaseModel, Embeddings):
     client: Any
     """volcano client"""
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """
         Validate whether volcano_ak and volcano_sk in the environment variables or
@@ -7,8 +7,8 @@ import time
 from typing import Any, Callable, Dict, List, Sequence
 
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -76,7 +76,7 @@ class YandexGPTEmbeddings(BaseModel, Embeddings):
 
         allow_population_by_field_name = True
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that iam token exists in environment."""
 
@@ -3,8 +3,8 @@ from typing import Any, Dict, List, Optional, cast
 import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 
 class AI21PenaltyData(BaseModel):
@@ -73,7 +73,7 @@ class AI21(LLM):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         ai21_api_key = convert_to_secret_str(
@@ -2,8 +2,8 @@ from typing import Any, Dict, List, Optional, Sequence
 
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
-from langchain_core.pydantic_v1 import Extra, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Extra, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.llms.utils import enforce_stop_tokens
 
@@ -129,7 +129,7 @@ class AlephAlpha(LLM):
     """Stop sequences to use."""
 
     # Client params
-    aleph_alpha_api_key: Optional[str] = None
+    aleph_alpha_api_key: Optional[SecretStr] = None
     """API key for Aleph Alpha API."""
     host: str = "https://api.aleph-alpha.com"
     """The hostname of the API host.
@@ -167,7 +167,7 @@ class AlephAlpha(LLM):
 
        extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["aleph_alpha_api_key"] = convert_to_secret_str(
@@ -25,6 +25,7 @@ from langchain_core.utils import (
     check_package_version,
     get_from_dict_or_env,
     get_pydantic_field_names,
+    pre_init,
 )
 from langchain_core.utils.utils import build_extra_kwargs, convert_to_secret_str
 
@@ -74,7 +75,7 @@ class _AnthropicCommon(BaseLanguageModel):
         )
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["anthropic_api_key"] = convert_to_secret_str(
@@ -185,7 +186,7 @@ class Anthropic(LLM, _AnthropicCommon):
         allow_population_by_field_name = True
         arbitrary_types_allowed = True
 
-    @root_validator()
+    @pre_init
     def raise_warning(cls, values: Dict) -> Dict:
         """Raise warning that this class is deprecated."""
         warnings.warn(
@@ -14,8 +14,8 @@ from langchain_core.callbacks import (
     CallbackManagerForLLMRun,
 )
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.llms.openai import (
     BaseOpenAI,
@@ -93,7 +93,7 @@ class Anyscale(BaseOpenAI):
     def is_lc_serializable(cls) -> bool:
         return False
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         values["anyscale_api_base"] = get_from_dict_or_env(
@@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models import BaseLLM
 from langchain_core.outputs import Generation, LLMResult
-from langchain_core.pydantic_v1 import Field, root_validator
+from langchain_core.pydantic_v1 import Field
+from langchain_core.utils import pre_init
 
 
 class Aphrodite(BaseLLM):
@@ -157,7 +158,7 @@ class Aphrodite(BaseLLM):
 
     client: Any  #: :meta private:
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that python package exists in environment."""
 
@@ -7,8 +7,8 @@ from typing import Any, Dict, List, Optional
 import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.llms.utils import enforce_stop_tokens
 
@@ -31,7 +31,7 @@ class BaichuanLLM(LLM):
     baichuan_api_host: Optional[str] = None
     baichuan_api_key: Optional[SecretStr] = None
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         values["baichuan_api_key"] = convert_to_secret_str(
             get_from_dict_or_env(values, "baichuan_api_key", "BAICHUAN_API_KEY")
@@ -16,8 +16,8 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
-from langchain_core.pydantic_v1 import Field, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Field
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 logger = logging.getLogger(__name__)
 
@@ -76,7 +76,7 @@ class QianfanLLMEndpoint(LLM):
     In the case of other model, passing these params will not affect the result.
    """
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         values["qianfan_ak"] = convert_to_secret_str(
             get_from_dict_or_env(
@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Mapping, Optional, cast
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.llms.utils import enforce_stop_tokens
 
@@ -63,7 +63,7 @@ class Banana(LLM):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         banana_api_key = convert_to_secret_str(
@@ -10,7 +10,7 @@ import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 logger = logging.getLogger(__name__)
 
@@ -95,7 +95,7 @@ class Beam(LLM):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         beam_client_id = get_from_dict_or_env(
@@ -21,8 +21,8 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, Extra, Field
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 from langchain_community.llms.utils import enforce_stop_tokens
 from langchain_community.utilities.anthropic import (
@@ -389,7 +389,7 @@ class BedrockBase(BaseModel, ABC):
            ...Logic to handle guardrail intervention...
    """  # noqa: E501
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that AWS credentials to and python package exists in environment."""
 
@@ -743,7 +743,7 @@ class Bedrock(LLM, BedrockBase):
 
    """
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         model_id = values["model_id"]
         if model_id.startswith("anthropic.claude-3"):
@@ -5,7 +5,7 @@ import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 
 from langchain_community.llms.utils import enforce_stop_tokens
 
@@ -62,7 +62,7 @@ class CerebriumAI(LLM):
         values["model_kwargs"] = extra
         return values
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         cerebriumai_api_key = convert_to_secret_str(
@@ -4,7 +4,8 @@ from typing import Any, Dict, List, Optional
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import Generation, LLMResult
-from langchain_core.pydantic_v1 import Extra, Field, root_validator
+from langchain_core.pydantic_v1 import Extra, Field
+from langchain_core.utils import pre_init
 
 from langchain_community.llms.utils import enforce_stop_tokens
 
@@ -53,7 +54,7 @@ class Clarifai(LLM):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that we have all required info to access Clarifai
         platform and python package exists in environment."""
@@ -10,8 +10,8 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.load.serializable import Serializable
-from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.pydantic_v1 import Extra, Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
     retry,
@@ -95,7 +95,7 @@ class BaseCohere(Serializable):
     user_agent: str = "langchain"
     """Identifier for the application making the request."""
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         try:
@@ -6,7 +6,7 @@ from langchain_core.callbacks import (
     CallbackManagerForLLMRun,
 )
 from langchain_core.language_models.llms import LLM
-from langchain_core.pydantic_v1 import root_validator
+from langchain_core.utils import pre_init
 
 
 class CTransformers(LLM):
@@ -57,7 +57,7 @@ class CTransformers(LLM):
         """Return type of llm."""
         return "ctransformers"
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that ``ctransformers`` package is installed."""
         try:
@@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional, Union
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import BaseLLM
 from langchain_core.outputs import Generation, LLMResult
-from langchain_core.pydantic_v1 import Field, root_validator
+from langchain_core.pydantic_v1 import Field
+from langchain_core.utils import pre_init
 
 
 class CTranslate2(BaseLLM):
@@ -50,7 +51,7 @@ class CTranslate2(BaseLLM):
    explicitly specified.
    """
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that python package exists in environment."""
 
@@ -8,8 +8,8 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
-from langchain_core.pydantic_v1 import Extra, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import Extra
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 from langchain_community.utilities.requests import Requests
 
@@ -43,7 +43,7 @@ class DeepInfra(LLM):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
         deepinfra_api_token = get_from_dict_or_env(
@@ -1,12 +1,18 @@
 # flake8: noqa
+from langchain_core.utils import pre_init
 from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Union
+from langchain_core.utils import pre_init
 from langchain_core.pydantic_v1 import root_validator
+from langchain_core.utils import pre_init
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
     CallbackManagerForLLMRun,
 )
+from langchain_core.utils import pre_init
 from langchain_core.language_models.llms import LLM
+from langchain_core.utils import pre_init
 from langchain_community.llms.utils import enforce_stop_tokens
+from langchain_core.utils import pre_init
 from langchain_core.outputs import GenerationChunk
 
 
@@ -55,7 +61,7 @@ class DeepSparse(LLM):
         """Return type of llm."""
         return "deepsparse"
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that ``deepsparse`` package is installed."""
         try:
@@ -10,7 +10,7 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 from langchain_community.llms.utils import enforce_stop_tokens
 from langchain_community.utilities.requests import Requests
@@ -73,7 +73,7 @@ class EdenAI(LLM):
 
         extra = Extra.forbid
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
         values["edenai_api_key"] = get_from_dict_or_env(
@@ -3,7 +3,8 @@ from typing import Any, Dict, Iterator, List, Optional
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models import LLM
 from langchain_core.outputs import GenerationChunk
-from langchain_core.pydantic_v1 import Field, root_validator
+from langchain_core.pydantic_v1 import Field
+from langchain_core.utils import pre_init
 
 
 class ExLlamaV2(LLM):
@@ -58,7 +59,7 @@ class ExLlamaV2(LLM):
     disallowed_tokens: List[int] = Field(None)
     """List of tokens to disallow during generation."""
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict[str, Any]) -> Dict[str, Any]:
         try:
             import torch
@@ -9,8 +9,8 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import convert_to_secret_str, pre_init
 from langchain_core.utils.env import get_from_dict_or_env
 
 
@@ -61,7 +61,7 @@ class Fireworks(BaseLLM):
         """Get the namespace of the langchain object."""
         return ["langchain", "llms", "fireworks"]
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key in environment."""
         try:
@@ -10,7 +10,8 @@ from langchain_core.callbacks.manager import (
 from langchain_core.language_models.llms import LLM
 from langchain_core.load.serializable import Serializable
 from langchain_core.outputs import GenerationChunk, LLMResult
-from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
+from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.utils import pre_init
 from langchain_core.utils.env import get_from_dict_or_env
 from langchain_core.utils.utils import convert_to_secret_str
 
@@ -66,7 +67,7 @@ class BaseFriendli(Serializable):
     # is used by default.
     top_p: Optional[float] = None
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate if personal access token is provided in environment."""
         try:
@@ -11,7 +11,7 @@ from langchain_core.callbacks import (
 from langchain_core.language_models.llms import BaseLLM
 from langchain_core.load.serializable import Serializable
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
-from langchain_core.pydantic_v1 import root_validator
+from langchain_core.utils import pre_init
 
 if TYPE_CHECKING:
     import gigachat
@@ -113,7 +113,7 @@ class _BaseGigaChat(Serializable):
             verbose=self.verbose,
         )
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate authenticate data in environment and python package is installed."""
         try:
@@ -6,8 +6,8 @@ from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models import LanguageModelInput
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
-from langchain_core.pydantic_v1 import BaseModel, SecretStr, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.pydantic_v1 import BaseModel, SecretStr
+from langchain_core.utils import get_from_dict_or_env, pre_init
 
 from langchain_community.llms import BaseLLM
 from langchain_community.utilities.vertexai import create_retry_decorator
@@ -107,7 +107,7 @@ class GooglePalm(BaseLLM, BaseModel):
         """Get the namespace of the langchain object."""
         return ["langchain", "llms", "google_palm"]
 
-    @root_validator()
+    @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate api key, python package exists."""
         google_api_key = get_from_dict_or_env(
@ -4,7 +4,7 @@ from typing import Any, Dict, List, Mapping, Optional
|
||||
from langchain_core.callbacks import CallbackManagerForLLMRun
|
||||
from langchain_core.language_models.llms import LLM
|
||||
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
|
||||
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
|
||||
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -86,7 +86,7 @@ class GooseAI(LLM):
|
||||
values["model_kwargs"] = extra
|
||||
return values
|
||||
|
||||
@root_validator()
|
||||
@pre_init
|
||||
def validate_environment(cls, values: Dict) -> Dict:
|
||||
"""Validate that api key and python package exists in environment."""
|
||||
gooseai_api_key = convert_to_secret_str(
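Most of the validators touched in this PR share the same body shape once the decorator is swapped: resolve a credential from the constructor kwargs or an environment variable, wrap it as a secret, write it back into values, and return the dict. A hedged sketch of that recurring shape (the example_api_key field and EXAMPLE_API_KEY variable are placeholders, not taken from any file above):

from typing import Dict, Optional

from langchain_core.pydantic_v1 import BaseModel, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init


class ExampleCommon(BaseModel):
    example_api_key: Optional[SecretStr] = None

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        # Prefer the value passed to the constructor, fall back to the env var.
        values["example_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "example_api_key", "EXAMPLE_API_KEY")
        )
        return values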

@ -3,7 +3,8 @@ from typing import Any, Dict, List, Mapping, Optional, Set

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.pydantic_v1 import Extra, Field
from langchain_core.utils import pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -127,7 +128,7 @@ class GPT4All(LLM):
"streaming": self.streaming,
}

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the python package exists in the environment."""
try:

@ -10,7 +10,11 @@ from langchain_core.callbacks import (
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
from langchain_core.utils import (
get_from_dict_or_env,
get_pydantic_field_names,
pre_init,
)

logger = logging.getLogger(__name__)

@ -162,7 +166,7 @@ class HuggingFaceEndpoint(LLM):
values["model"] = values.get("endpoint_url") or values.get("repo_id")
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that package is installed and that the API token is valid."""
try:

@ -4,8 +4,8 @@ from typing import Any, Dict, List, Mapping, Optional
from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -61,7 +61,7 @@ class HuggingFaceHub(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
huggingfacehub_api_token = get_from_dict_or_env(

@ -9,7 +9,7 @@ from langchain_core.callbacks import (
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.utils import get_pydantic_field_names
from langchain_core.utils import get_pydantic_field_names, pre_init

logger = logging.getLogger(__name__)

@ -134,7 +134,7 @@ class HuggingFaceTextGenInference(LLM):
values["model_kwargs"] = extra
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that python package exists in environment."""

@ -8,7 +8,7 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import get_pydantic_field_names
from langchain_core.utils import get_pydantic_field_names, pre_init
from langchain_core.utils.utils import build_extra_kwargs

logger = logging.getLogger(__name__)

@ -133,7 +133,7 @@ class LlamaCpp(LLM):
verbose: bool = True
"""Print verbose output to stderr."""

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that llama-cpp-python library is installed."""
try:
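The other recurring flavor of validate_environment in this diff does not read credentials at all; it only checks that an optional provider SDK can be imported and builds a client from it. The decorator swap is identical. A sketch of that shape (example_sdk and its Client class are placeholders for whichever package a given wrapper needs):

from typing import Any, Dict

from langchain_core.pydantic_v1 import BaseModel
from langchain_core.utils import pre_init


class ExampleLocalLLM(BaseModel):
    client: Any = None
    model_path: str = ""

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        try:
            import example_sdk  # placeholder for the provider's python package
        except ImportError as e:
            raise ImportError(
                "Could not import example_sdk. "
                "Install it with `pip install example_sdk`."
            ) from e
        values["client"] = example_sdk.Client(values["model_path"])
        return values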

@ -2,7 +2,8 @@ from typing import Any, Dict, List, Mapping, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import pre_init


class ManifestWrapper(LLM):

@ -16,7 +17,7 @@ class ManifestWrapper(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that python package exists in environment."""
try:

@ -16,7 +16,7 @@ from langchain_core.callbacks import (
)
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -72,7 +72,7 @@ class MinimaxCommon(BaseModel):
minimax_group_id: Optional[str] = None
minimax_api_key: Optional[SecretStr] = None

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
values["minimax_api_key"] = convert_to_secret_str(

@ -4,7 +4,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LLM
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -79,7 +79,7 @@ class MoonshotCommon(BaseModel):
"""
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
values["moonshot_api_key"] = convert_to_secret_str(

@ -3,8 +3,8 @@ from typing import Any, Dict, List, Mapping, Optional
import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -64,7 +64,7 @@ class MosaicML(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
mosaicml_api_token = get_from_dict_or_env(

@ -2,8 +2,8 @@ from typing import Any, Dict, List, Mapping, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init


class NLPCloud(LLM):

@ -56,7 +56,7 @@ class NLPCloud(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
values["nlpcloud_api_key"] = convert_to_secret_str(

@ -4,8 +4,8 @@ from typing import Any, Dict, List, Optional
import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import get_from_dict_or_env, pre_init

logger = logging.getLogger(__name__)

@ -47,7 +47,7 @@ class OCIModelDeploymentLLM(LLM):
"""Stop words to use when generating. Model output is cut off
at the first occurrence of any of these substrings."""

@root_validator()
@pre_init
def validate_environment( # pylint: disable=no-self-argument
cls, values: Dict
) -> Dict:

@ -8,7 +8,8 @@ from typing import Any, Dict, Iterator, List, Mapping, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
from langchain_core.pydantic_v1 import BaseModel, Extra
from langchain_core.utils import pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -99,7 +100,7 @@ class OCIGenAIBase(BaseModel, ABC):
is_stream: bool = False
"""Whether to stream back partial progress"""

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that OCI config and python package exists in environment."""

@ -1,7 +1,7 @@
from typing import Any, Dict

from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.pydantic_v1 import Field, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.llms.openai import BaseOpenAI
from langchain_community.utils.openai import is_openai_v1

@ -66,7 +66,7 @@ class OctoAIEndpoint(BaseOpenAI):
"""Return type of llm."""
return "octoai_endpoint"

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
values["octoai_api_base"] = get_from_dict_or_env(

@ -5,8 +5,8 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.llms import LLM
from langchain_core.messages import AIMessage
from langchain_core.pydantic_v1 import Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env, pre_init

logger = logging.getLogger(__name__)

@ -38,7 +38,7 @@ class OpaquePrompts(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validates that the OpaquePrompts API key and the Python package exist."""
try:

@ -29,7 +29,11 @@ from langchain_core.callbacks import (
from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
from langchain_core.utils import (
get_from_dict_or_env,
get_pydantic_field_names,
pre_init,
)
from langchain_core.utils.utils import build_extra_kwargs

from langchain_community.utils.openai import is_openai_v1

@ -269,7 +273,7 @@ class BaseOpenAI(BaseLLM):
)
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
if values["n"] < 1:
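The OpenAI validators above also guard plain numeric fields, for example rejecting n < 1; those checks read values["n"] directly, so they rely on the field's default already being present in the dict when the validator runs. A hedged sketch of that style of check (class and field names are illustrative, not the actual BaseOpenAI implementation):

from typing import Dict

from langchain_core.pydantic_v1 import BaseModel
from langchain_core.utils import pre_init


class ExampleCompletionModel(BaseModel):
    n: int = 1
    streaming: bool = False

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        # Both checks assume "n" and "streaming" are present, defaulted if unset.
        if values["n"] < 1:
            raise ValueError("n must be at least 1.")
        if values["n"] > 1 and values["streaming"]:
            raise ValueError("n must be 1 when streaming.")
        return values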

@ -818,7 +822,7 @@ class AzureOpenAI(BaseOpenAI):
"""Get the namespace of the langchain object."""
return ["langchain", "llms", "openai"]

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
if values["n"] < 1:

@ -1025,7 +1029,7 @@ class OpenAIChat(BaseLLM):
values["model_kwargs"] = extra
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
openai_api_key = get_from_dict_or_env(

@ -1,6 +1,6 @@
from typing import Any, Dict

from langchain_core.pydantic_v1 import root_validator
from langchain_core.utils import pre_init

from langchain_community.llms.openai import BaseOpenAI

@ -16,7 +16,7 @@ class OpenLM(BaseOpenAI):
def _invocation_params(self) -> Dict[str, Any]:
return {**{"model": self.model_name}, **super()._invocation_params}

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
try:
import openlm

@ -6,8 +6,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils import get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -52,7 +51,7 @@ class PaiEasEndpoint(LLM):

version: Optional[str] = "2.0"

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
values["eas_service_url"] = get_from_dict_or_env(

@ -4,7 +4,7 @@ from typing import Any, Dict, List, Mapping, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -86,7 +86,7 @@ class Petals(LLM):
values["model_kwargs"] = extra
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
huggingface_api_key = convert_to_secret_str(

@ -10,7 +10,7 @@ from langchain_core.pydantic_v1 import (
SecretStr,
root_validator,
)
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -65,7 +65,7 @@ class PipelineAI(LLM, BaseModel):
values["pipeline_kwargs"] = extra
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
pipeline_api_key = convert_to_secret_str(

@ -3,8 +3,8 @@ from typing import Any, Dict, List, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -53,7 +53,7 @@ class PredictionGuard(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the access token and python package exists in environment."""
token = get_from_dict_or_env(values, "token", "PREDICTIONGUARD_TOKEN")

@ -7,7 +7,7 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils import get_from_dict_or_env, pre_init

if TYPE_CHECKING:
from replicate.prediction import Prediction

@ -97,7 +97,7 @@ class Replicate(LLM):
values["model_kwargs"] = extra
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
replicate_api_token = get_from_dict_or_env(

@ -8,7 +8,8 @@ from typing import Any, Dict, List, Mapping, Optional, Set

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
from langchain_core.pydantic_v1 import BaseModel, Extra
from langchain_core.utils import pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -98,7 +99,7 @@ class RWKV(LLM, BaseModel):
"verbose",
}

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the python package exists in the environment."""
try:

@ -7,7 +7,8 @@ from typing import Any, Dict, Generic, Iterator, List, Mapping, Optional, TypeVa

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -249,7 +250,7 @@ class SagemakerEndpoint(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Dont do anything if client provided externally"""
if values.get("client") is not None:
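A few of the validators, like the SageMaker one above, begin with an early return when a ready-made client has been injected and only build their own client otherwise. A sketch of that short-circuit pattern (the boto3 client construction is simplified and illustrative, not the actual SagemakerEndpoint logic):

from typing import Any, Dict

from langchain_core.pydantic_v1 import BaseModel
from langchain_core.utils import pre_init


class ExampleEndpointLLM(BaseModel):
    client: Any = None
    region_name: str = ""

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        # Respect an externally supplied client and skip all setup.
        if values.get("client") is not None:
            return values
        try:
            import boto3
        except ImportError as e:
            raise ImportError(
                "Could not import boto3. Install it with `pip install boto3`."
            ) from e
        values["client"] = boto3.client(
            "sagemaker-runtime", region_name=values["region_name"] or None
        )
        return values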

@ -5,8 +5,8 @@ import requests
from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env, pre_init


class SVEndpointHandler:

@ -218,7 +218,7 @@ class Sambaverse(LLM):
def is_lc_serializable(cls) -> bool:
return True

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key exists in environment."""
values["sambaverse_url"] = get_from_dict_or_env(

@ -731,7 +731,7 @@ class SambaStudio(LLM):
"""Return type of llm."""
return "Sambastudio LLM"

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
values["sambastudio_base_url"] = get_from_dict_or_env(

@ -4,7 +4,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LLM
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -68,7 +68,7 @@ class SolarCommon(BaseModel):
def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
api_key = get_from_dict_or_env(values, "solar_api_key", "SOLAR_API_KEY")
if api_key is None or len(api_key) == 0:

@ -17,8 +17,8 @@ from wsgiref.handlers import format_date_time
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import get_from_dict_or_env, pre_init

logger = logging.getLogger(__name__)

@ -54,7 +54,7 @@ class SparkLLM(LLM):
top_k: int = 4
model_kwargs: Dict[str, Any] = Field(default_factory=dict)

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
values["spark_app_id"] = get_from_dict_or_env(
values,

@ -6,7 +6,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -58,7 +58,7 @@ class StochasticAI(LLM):
values["model_kwargs"] = extra
return values

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key exists in environment."""
stochasticai_api_key = convert_to_secret_str(

@ -5,8 +5,8 @@ from typing import Any, Callable, Dict, List, Mapping, Optional
import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from requests import ConnectTimeout, ReadTimeout, RequestException
from tenacity import (
before_sleep_log,

@ -65,7 +65,7 @@ class Nebula(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
nebula_service_url = get_from_dict_or_env(

@ -24,8 +24,8 @@ from langchain_core.callbacks import (
)
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import get_from_dict_or_env, pre_init
from requests.exceptions import HTTPError
from tenacity import (
before_sleep_log,

@ -198,7 +198,7 @@ class Tongyi(BaseLLM):
"""Return type of llm."""
return "tongyi"

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
values["dashscope_api_key"] = get_from_dict_or_env(

@ -10,7 +10,8 @@ from langchain_core.callbacks.manager import (
)
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_core.utils import pre_init

from langchain_community.utilities.vertexai import (
create_retry_decorator,

@ -222,7 +223,7 @@ class VertexAI(_VertexAICommon, BaseLLM):
"""Get the namespace of the langchain object."""
return ["langchain", "llms", "vertexai"]

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the python package exists in environment."""
tuned_model_name = values.get("tuned_model_name")

@ -409,7 +410,7 @@ class VertexAIModelGarden(_VertexAIBase, BaseLLM):
"Set result_arg to None if output of the model is expected to be a string."
"Otherwise, if it's a dict, provided an argument that contains the result."

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that the python package exists in environment."""
try:

@ -3,7 +3,8 @@ from typing import Any, Dict, List, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, LLMResult
from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import pre_init

from langchain_community.llms.openai import BaseOpenAI
from langchain_community.utils.openai import is_openai_v1

@ -73,7 +74,7 @@ class VLLM(BaseLLM):

client: Any #: :meta private:

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that python package exists in environment."""

@ -5,8 +5,8 @@ from typing import Any, Dict, Iterator, List, Optional
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init


class VolcEngineMaasBase(BaseModel):

@ -52,7 +52,7 @@ class VolcEngineMaasBase(BaseModel):
"""Timeout for read response from volc engine maas endpoint.
Default is 60 seconds."""

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
volc_engine_maas_ak = convert_to_secret_str(
get_from_dict_or_env(values, "volc_engine_maas_ak", "VOLC_ACCESSKEY")

@ -6,8 +6,8 @@ from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init

logger = logging.getLogger(__name__)

@ -115,7 +115,7 @@ class WatsonxLLM(BaseLLM):
"instance_id": "WATSONX_INSTANCE_ID",
}

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that credentials and python package exists in environment."""
values["url"] = convert_to_secret_str(

@ -3,8 +3,8 @@ from typing import Any, Dict, List, Mapping, Optional
import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env, pre_init

from langchain_community.llms.utils import enforce_stop_tokens

@ -69,7 +69,7 @@ class Writer(LLM):

extra = Extra.forbid

@root_validator()
@pre_init
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and organization id exist in environment."""
Some files were not shown because too many files have changed in this diff.