community[patch]: fix huggingface pydantic 2 init (#26286)

Author: ccurme
Date: 2024-09-10 18:15:39 -04:00
Committed by: GitHub
Parent: 287911adbc
Commit: e2c8690e7f
5 changed files with 10 additions and 10 deletions
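
For context (not part of the commit): under pydantic 2, a field annotated as `Any` with no explicit default is required at construction time, whereas pydantic 1 gave it an implicit `None`. That is why every private `client` field below gains an `= None` default. A minimal illustrative sketch, assuming pydantic 2 is installed and using made-up class names:

# Illustrative sketch, not from the commit: why `client: Any` alone breaks
# under pydantic 2. Class names here are invented for the example.
from typing import Any

from pydantic import BaseModel, ValidationError


class Broken(BaseModel):
    client: Any  # required under pydantic 2 (no implicit None default)


class Fixed(BaseModel):
    client: Any = None  # explicit default restores the pydantic 1 behavior


Fixed()  # ok: client defaults to None and can be populated internally later
try:
    Broken()  # pydantic 2 raises ValidationError: field "client" is required
except ValidationError as err:
    print(err)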


@@ -44,7 +44,7 @@ class HuggingFaceEmbeddings(BaseModel, Embeddings):
     )
     """
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     model_name: str = DEFAULT_MODEL_NAME
     """Model name to use."""
     cache_folder: Optional[str] = None
@@ -151,7 +151,7 @@ class HuggingFaceInstructEmbeddings(BaseModel, Embeddings):
     )
     """
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     model_name: str = DEFAULT_INSTRUCT_MODEL
     """Model name to use."""
     cache_folder: Optional[str] = None
@@ -283,7 +283,7 @@ class HuggingFaceBgeEmbeddings(BaseModel, Embeddings):
     )
     """
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     model_name: str = DEFAULT_BGE_MODEL
     """Model name to use."""
     cache_folder: Optional[str] = None


@@ -35,8 +35,8 @@ class HuggingFaceHubEmbeddings(BaseModel, Embeddings):
     )
     """
-    client: Any  #: :meta private:
-    async_client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
+    async_client: Any = None  #: :meta private:
     model: Optional[str] = None
     """Model name to use."""
     repo_id: Optional[str] = None


@@ -120,8 +120,8 @@ class HuggingFaceEndpoint(LLM):
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Holds any model parameters valid for `call` not explicitly specified"""
     model: str
-    client: Any
-    async_client: Any
+    client: Any = None
+    async_client: Any = None
     task: Optional[str] = None
     """Task to call the model with.
     Should be a task that returns `generated_text` or `summary_text`."""


@@ -43,7 +43,7 @@ class HuggingFaceHub(LLM):
             hf = HuggingFaceHub(repo_id="gpt2", huggingfacehub_api_token="my-api-key")
     """
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     repo_id: Optional[str] = None
     """Model name to use.
     If not provided, the default model for the chosen task will be used."""


@@ -30,8 +30,8 @@ class HuggingFaceEndpointEmbeddings(BaseModel, Embeddings):
     )
     """
-    client: Any = None
-    async_client: Any = None
+    client: Any = None  #: :meta private:
+    async_client: Any = None  #: :meta private:
     model: Optional[str] = None
     """Model name to use."""
     repo_id: Optional[str] = None
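
The `#: :meta private:` markers indicate fields that callers never pass; the wrapper populates them itself during validation. A rough sketch of that pattern under pydantic 2 (assumed shape with a hypothetical class, not the library's actual code):

# Rough sketch of the private-client pattern, with a hypothetical wrapper class.
from typing import Any

from pydantic import BaseModel, model_validator


class ExampleWrapper(BaseModel):
    client: Any = None  #: :meta private:
    repo_id: str = "gpt2"

    @model_validator(mode="after")
    def _build_client(self) -> "ExampleWrapper":
        # Stand-in for constructing the real SDK client from the public fields.
        self.client = f"client-for-{self.repo_id}"
        return self


wrapper = ExampleWrapper()  # succeeds only because `client` has a default
print(wrapper.client)       # -> client-for-gpt2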