From 47a9199fa6ed72510814dcdffbb1f6749eae5c21 Mon Sep 17 00:00:00 2001
From: Tibor Reiss <75096465+tibor-reiss@users.noreply.github.com>
Date: Thu, 3 Oct 2024 22:12:11 +0200
Subject: [PATCH] community[patch]: Fix missing protected_namespaces (#27076)

Fixes #26861

---------

Co-authored-by: Eugene Yurtsev
---
 libs/community/langchain_community/llms/azureml_endpoint.py | 4 +++-
 libs/community/langchain_community/llms/bedrock.py          | 2 ++
 libs/community/langchain_community/llms/minimax.py          | 4 +++-
 libs/community/langchain_community/llms/vertexai.py         | 4 +++-
 libs/community/langchain_community/llms/volcengine_maas.py  | 4 +++-
 5 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/libs/community/langchain_community/llms/azureml_endpoint.py b/libs/community/langchain_community/llms/azureml_endpoint.py
index f2b5eed2e1c..5c34ca15477 100644
--- a/libs/community/langchain_community/llms/azureml_endpoint.py
+++ b/libs/community/langchain_community/llms/azureml_endpoint.py
@@ -9,7 +9,7 @@ from langchain_core.callbacks.manager import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import BaseLLM
 from langchain_core.outputs import Generation, LLMResult
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
-from pydantic import BaseModel, SecretStr, model_validator, validator
+from pydantic import BaseModel, ConfigDict, SecretStr, model_validator, validator
 
 DEFAULT_TIMEOUT = 50
 
@@ -382,6 +382,8 @@ class AzureMLBaseEndpoint(BaseModel):
     model_kwargs: Optional[dict] = None
     """Keyword arguments to pass to the model."""
 
+    model_config = ConfigDict(protected_namespaces=())
+
     @model_validator(mode="before")
     @classmethod
     def validate_environ(cls, values: Dict) -> Any:
diff --git a/libs/community/langchain_community/llms/bedrock.py b/libs/community/langchain_community/llms/bedrock.py
index 2436ca765b1..a0de3fb81e2 100644
--- a/libs/community/langchain_community/llms/bedrock.py
+++ b/libs/community/langchain_community/llms/bedrock.py
@@ -295,6 +295,8 @@ class LLMInputOutputAdapter:
 class BedrockBase(BaseModel, ABC):
     """Base class for Bedrock models."""
 
+    model_config = ConfigDict(protected_namespaces=())
+
     client: Any = Field(exclude=True)  #: :meta private:
 
     region_name: Optional[str] = None
diff --git a/libs/community/langchain_community/llms/minimax.py b/libs/community/langchain_community/llms/minimax.py
index e508eb4f959..5a1822fffb2 100644
--- a/libs/community/langchain_community/llms/minimax.py
+++ b/libs/community/langchain_community/llms/minimax.py
@@ -16,7 +16,7 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
-from pydantic import BaseModel, Field, SecretStr, model_validator
+from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
 
 from langchain_community.llms.utils import enforce_stop_tokens
 
@@ -58,6 +58,8 @@ class _MinimaxEndpointClient(BaseModel):
 class MinimaxCommon(BaseModel):
     """Common parameters for Minimax large language models."""
 
+    model_config = ConfigDict(protected_namespaces=())
+
     _client: _MinimaxEndpointClient
     model: str = "abab5.5-chat"
     """Model name to use."""
diff --git a/libs/community/langchain_community/llms/vertexai.py b/libs/community/langchain_community/llms/vertexai.py
index 35fd883fdd7..0b42bd535ef 100644
--- a/libs/community/langchain_community/llms/vertexai.py
+++ b/libs/community/langchain_community/llms/vertexai.py
@@ -11,7 +11,7 @@ from langchain_core.callbacks.manager import (
 from langchain_core.language_models.llms import BaseLLM
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
 from langchain_core.utils import pre_init
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
 
 from langchain_community.utilities.vertexai import (
     create_retry_decorator,
@@ -100,6 +100,8 @@ async def acompletion_with_retry(
 
 
 class _VertexAIBase(BaseModel):
+    model_config = ConfigDict(protected_namespaces=())
+
     project: Optional[str] = None
     "The default GCP project to use when making Vertex API calls."
     location: str = "us-central1"
diff --git a/libs/community/langchain_community/llms/volcengine_maas.py b/libs/community/langchain_community/llms/volcengine_maas.py
index c62c3523bb3..e737a0a5569 100644
--- a/libs/community/langchain_community/llms/volcengine_maas.py
+++ b/libs/community/langchain_community/llms/volcengine_maas.py
@@ -6,12 +6,14 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
-from pydantic import BaseModel, Field, SecretStr
+from pydantic import BaseModel, ConfigDict, Field, SecretStr
 
 
 class VolcEngineMaasBase(BaseModel):
     """Base class for VolcEngineMaas models."""
 
+    model_config = ConfigDict(protected_namespaces=())
+
     client: Any = None
 
     volc_engine_maas_ak: Optional[SecretStr] = None
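
A note on the behavior behind this fix, separate from the patch itself: pydantic v2 treats the "model_" prefix as a protected namespace, so declaring a field such as `model_kwargs` or `model_id` on a `BaseModel` subclass emits a UserWarning about a conflict with the "model_" namespace at class-definition time. Setting `model_config = ConfigDict(protected_namespaces=())`, as each file above now does, clears that namespace and silences the warning. The sketch below illustrates the effect with hypothetical `Warns`/`Silent` classes rather than the LangChain classes touched by the patch.

```python
from typing import Optional

from pydantic import BaseModel, ConfigDict


class Warns(BaseModel):
    # Default config: this field name collides with pydantic's protected
    # "model_" namespace, so defining the class emits a UserWarning.
    model_kwargs: Optional[dict] = None


class Silent(BaseModel):
    # Same opt-out the patch applies to AzureMLBaseEndpoint, BedrockBase,
    # MinimaxCommon, _VertexAIBase, and VolcEngineMaasBase: clear the
    # protected namespaces so "model_*" field names no longer warn.
    model_config = ConfigDict(protected_namespaces=())

    model_kwargs: Optional[dict] = None
```

The protected namespace exists to keep user-defined fields from shadowing pydantic's own `model_*` attributes (for example `model_dump` and `model_validate`); clearing it only suppresses the warning and does not change how these fields are validated or serialized.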