ollama[major]: upgrade pydantic (#26044)

ccurme
2024-09-04 13:54:52 -04:00
committed by GitHub
parent 51c6899850
commit be7cd0756f
5 changed files with 45 additions and 43 deletions


@@ -35,11 +35,12 @@ from langchain_core.messages import (
 from langchain_core.messages.ai import UsageMetadata
 from langchain_core.messages.tool import tool_call
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import Field, root_validator
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from ollama import AsyncClient, Client, Message, Options
+from pydantic import PrivateAttr, model_validator
+from typing_extensions import Self
 
 
 def _get_usage_metadata_from_generation_info(
@@ -328,12 +329,12 @@ class ChatOllama(BaseChatModel):
     For a full list of the params, see [this link](https://pydoc.dev/httpx/latest/httpx.Client.html)
     """
 
-    _client: Client = Field(default=None)
+    _client: Client = PrivateAttr(default=None)
     """
     The client to use for making requests.
     """
 
-    _async_client: AsyncClient = Field(default=None)
+    _async_client: AsyncClient = PrivateAttr(default=None)
     """
     The async client to use for making requests.
     """
@@ -364,14 +365,13 @@ class ChatOllama(BaseChatModel):
             "keep_alive": self.keep_alive,
         }
 
-    @root_validator(pre=False, skip_on_failure=True)
-    def _set_clients(cls, values: dict) -> dict:
+    @model_validator(mode="after")
+    def _set_clients(self) -> Self:
         """Set clients to use for ollama."""
-        values["_client"] = Client(host=values["base_url"], **values["client_kwargs"])
-        values["_async_client"] = AsyncClient(
-            host=values["base_url"], **values["client_kwargs"]
-        )
-        return values
+        client_kwargs = self.client_kwargs or {}
+        self._client = Client(host=self.base_url, **client_kwargs)
+        self._async_client = AsyncClient(host=self.base_url, **client_kwargs)
+        return self
 
     def _convert_messages_to_ollama_messages(
         self, messages: List[BaseMessage]
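
The validator change above follows the standard pydantic v1-to-v2 migration: a class-level @root_validator that mutated a values dict becomes an instance-level @model_validator(mode="after") that assigns to self and returns Self. A hedged, runnable sketch of the same pattern, where FakeClient merely stands in for ollama.Client and the class name is illustrative:

from typing import Any, Dict, Optional

from pydantic import BaseModel, PrivateAttr, model_validator
from typing_extensions import Self


class FakeClient:
    """Stand-in for ollama.Client so the sketch runs without ollama installed."""

    def __init__(self, host: str, **kwargs: Any) -> None:
        self.host = host


class ChatSketch(BaseModel):
    base_url: str = "http://localhost:11434"
    client_kwargs: Optional[Dict[str, Any]] = None
    _client: Optional[FakeClient] = PrivateAttr(default=None)

    @model_validator(mode="after")
    def _set_clients(self) -> Self:
        """Runs after field validation and receives the constructed instance."""
        client_kwargs = self.client_kwargs or {}
        self._client = FakeClient(host=self.base_url, **client_kwargs)
        return self


model = ChatSketch()
print(model._client.host)  # http://localhost:11434

Returning self (typed as Self) keeps the validator compatible with subclasses, which is why the upgraded code imports Self from typing_extensions rather than annotating the concrete class.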