From 4867fe7ac8b05779ba8ead9151ab99622938b0c1 Mon Sep 17 00:00:00 2001
From: TheSongg <145535169+TheSongg@users.noreply.github.com>
Date: Wed, 15 Jan 2025 23:11:26 +0800
Subject: [PATCH] [langchain_community.llms.xinference]: fix error in
 xinference.py (#29216)

- [ ] **PR title**: [langchain_community.llms.xinference]: fix error in xinference.py

- [ ] **PR message**:
    - The old code raised a ValidationError
      (pydantic_core._pydantic_core.ValidationError: 1 validation error for
      Xinference) when instantiating Xinference from xinference.py. The issue
      has been resolved by adjusting the type and default value of the
      `client` field:

        File "/media/vdc/python/lib/python3.10/site-packages/pydantic/main.py", line 212, in __init__
            validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
        pydantic_core._pydantic_core.ValidationError: 1 validation error for Xinference
        client
          Field required [type=missing, input_value={'server_url': 'http://10...t4', 'model_kwargs': {}}, input_type=dict]
            For further information visit https://errors.pydantic.dev/2.9/v/missing

- [ ] **tests**:

        from langchain_community.llms import Xinference

        llm = Xinference(
            server_url="http://0.0.0.0:9997",  # replace with your Xinference server URL
            model_uid={model_uid}  # replace model_uid with the model UID returned from launching the model
        )
---
 libs/community/langchain_community/llms/xinference.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/libs/community/langchain_community/llms/xinference.py b/libs/community/langchain_community/llms/xinference.py
index 8a6cb2ee56d..ada9f8b1f10 100644
--- a/libs/community/langchain_community/llms/xinference.py
+++ b/libs/community/langchain_community/llms/xinference.py
@@ -81,7 +81,7 @@ class Xinference(LLM):
 
     """  # noqa: E501
 
-    client: Any
+    client: Optional[Any] = None
     server_url: Optional[str]
     """URL of the xinference server"""
     model_uid: Optional[str]
@@ -156,6 +156,8 @@ class Xinference(LLM):
         Returns:
             The generated string by the model.
         """
+        if self.client is None:
+            raise ValueError("Client is not initialized!")
         model = self.client.get_model(self.model_uid)
 
         generate_config: "LlamaCppGenerateConfig" = kwargs.get("generate_config", {})
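
Note (outside the patch): a minimal, self-contained sketch of the failure mode being fixed, assuming pydantic v2 semantics, where a field without a default is required even when annotated `Any`. The `Before`/`After` class names are hypothetical and exist only to contrast the old and new declarations:

    from typing import Any, Optional

    from pydantic import BaseModel, ValidationError

    class Before(BaseModel):
        client: Any  # no default: pydantic v2 treats this as a required field

    class After(BaseModel):
        client: Optional[Any] = None  # patched declaration: optional, defaults to None

    try:
        Before()  # raises: "client Field required [type=missing, ...]"
    except ValidationError as err:
        print(err)

    print(After())  # validates fine; client stays None until assigned later

Because the field now defaults to None, the added `if self.client is None` guard in `_call` turns an unconfigured client into an explicit ValueError rather than an AttributeError on `None.get_model(...)`.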