From e2c8690e7fe1a96eb0d49fa443de5b451d8c5cd4 Mon Sep 17 00:00:00 2001
From: ccurme
Date: Tue, 10 Sep 2024 18:15:39 -0400
Subject: [PATCH] community[patch]: fix huggingface pydantic 2 init (#26286)

---
 .../community/langchain_community/embeddings/huggingface.py | 6 +++---
 .../langchain_community/embeddings/huggingface_hub.py       | 4 ++--
 .../langchain_community/llms/huggingface_endpoint.py        | 4 ++--
 libs/community/langchain_community/llms/huggingface_hub.py  | 2 +-
 .../embeddings/huggingface_endpoint.py                      | 4 ++--
 5 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/libs/community/langchain_community/embeddings/huggingface.py b/libs/community/langchain_community/embeddings/huggingface.py
index e6eeadca580..817a817299f 100644
--- a/libs/community/langchain_community/embeddings/huggingface.py
+++ b/libs/community/langchain_community/embeddings/huggingface.py
@@ -44,7 +44,7 @@ class HuggingFaceEmbeddings(BaseModel, Embeddings):
             )
     """
 
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     model_name: str = DEFAULT_MODEL_NAME
     """Model name to use."""
     cache_folder: Optional[str] = None
@@ -151,7 +151,7 @@ class HuggingFaceInstructEmbeddings(BaseModel, Embeddings):
             )
     """
 
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     model_name: str = DEFAULT_INSTRUCT_MODEL
     """Model name to use."""
     cache_folder: Optional[str] = None
@@ -283,7 +283,7 @@ class HuggingFaceBgeEmbeddings(BaseModel, Embeddings):
             )
     """
 
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     model_name: str = DEFAULT_BGE_MODEL
     """Model name to use."""
     cache_folder: Optional[str] = None
diff --git a/libs/community/langchain_community/embeddings/huggingface_hub.py b/libs/community/langchain_community/embeddings/huggingface_hub.py
index 33711181a19..b1a1fac3721 100644
--- a/libs/community/langchain_community/embeddings/huggingface_hub.py
+++ b/libs/community/langchain_community/embeddings/huggingface_hub.py
@@ -35,8 +35,8 @@ class HuggingFaceHubEmbeddings(BaseModel, Embeddings):
             )
     """
 
-    client: Any  #: :meta private:
-    async_client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
+    async_client: Any = None  #: :meta private:
     model: Optional[str] = None
     """Model name to use."""
     repo_id: Optional[str] = None
diff --git a/libs/community/langchain_community/llms/huggingface_endpoint.py b/libs/community/langchain_community/llms/huggingface_endpoint.py
index e9b4279a7d6..61efcc3f36b 100644
--- a/libs/community/langchain_community/llms/huggingface_endpoint.py
+++ b/libs/community/langchain_community/llms/huggingface_endpoint.py
@@ -120,8 +120,8 @@ class HuggingFaceEndpoint(LLM):
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Holds any model parameters valid for `call` not explicitly specified"""
     model: str
-    client: Any
-    async_client: Any
+    client: Any = None
+    async_client: Any = None
     task: Optional[str] = None
     """Task to call the model with.
     Should be a task that returns `generated_text` or `summary_text`."""
diff --git a/libs/community/langchain_community/llms/huggingface_hub.py b/libs/community/langchain_community/llms/huggingface_hub.py
index 7d1472fc5d8..151aa48f2b2 100644
--- a/libs/community/langchain_community/llms/huggingface_hub.py
+++ b/libs/community/langchain_community/llms/huggingface_hub.py
@@ -43,7 +43,7 @@ class HuggingFaceHub(LLM):
             hf = HuggingFaceHub(repo_id="gpt2", huggingfacehub_api_token="my-api-key")
     """
 
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     repo_id: Optional[str] = None
     """Model name to use.
     If not provided, the default model for the chosen task will be used."""
diff --git a/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py b/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py
index 1c662b68a4d..7d9ecd1ac0b 100644
--- a/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py
+++ b/libs/partners/huggingface/langchain_huggingface/embeddings/huggingface_endpoint.py
@@ -30,8 +30,8 @@ class HuggingFaceEndpointEmbeddings(BaseModel, Embeddings):
             )
     """
 
-    client: Any = None
-    async_client: Any = None
+    client: Any = None  #: :meta private:
+    async_client: Any = None  #: :meta private:
     model: Optional[str] = None
     """Model name to use."""
     repo_id: Optional[str] = None
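
Context for the change: pydantic v1 gave fields annotated with Optional[...] or Any and no explicit default an implicit default of None, so a bare `client: Any` could be left unset at construction time and populated later by a validator. Pydantic v2 dropped that implicit default, so the bare annotation makes the field required and instantiating the class without passing `client` fails validation before any validator runs; adding `= None` restores the previous behavior. The snippet below is a minimal sketch of that difference using hypothetical stand-in models (WithoutDefault, WithDefault), not the actual LangChain classes:

    from typing import Any

    from pydantic import BaseModel, model_validator  # pydantic v2


    class WithoutDefault(BaseModel):
        # Pre-patch shape: under pydantic v2 a bare ``Any`` annotation with
        # no default makes the field required at construction time.
        client: Any
        model_name: str = "some-model"


    class WithDefault(BaseModel):
        # Post-patch shape: ``= None`` keeps the field optional so a
        # validator can populate it, matching pydantic v1's implicit None.
        client: Any = None
        model_name: str = "some-model"

        @model_validator(mode="after")
        def _load_client(self) -> "WithDefault":
            # Stand-in for the real classes' validators, which construct the
            # HuggingFace client object here.
            if self.client is None:
                self.client = object()
            return self


    try:
        WithoutDefault(model_name="some-model")
    except Exception as err:
        print(type(err).__name__)  # prints "ValidationError": client is required

    print(WithDefault(model_name="some-model").client is not None)  # True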